Carma-platform v4.2.0
CARMA Platform is built on the Robot Operating System (ROS) and uses open-source software (OSS) to enable Cooperative Driving Automation (CDA) features that allow Automated Driving Systems to interact and cooperate with infrastructure and other vehicles through communication.
psm_to_external_object_convertor.cpp
// Copyright 2019-2023 Leidos
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <lanelet2_extension/regulatory_elements/CarmaTrafficSignal.h>
#include <tf2/LinearMath/Transform.h>
#include <wgs84_utils/wgs84_utils.h>

#include <algorithm>
#include <string>
#include <vector>

#include <boost/date_time/posix_time/posix_time.hpp>

#include <rclcpp/logger.hpp>
#include <rclcpp/logging.hpp>
#include <rclcpp/rclcpp.hpp>

#include <carma_perception_msgs/msg/external_object.hpp>
#include <carma_v2x_msgs/msg/psm.hpp>
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>

namespace motion_computation
{
namespace conversion
{
void convert(
  const carma_v2x_msgs::msg::PSM & in_msg, carma_perception_msgs::msg::ExternalObject & out_msg,
  const std::string & map_frame_id, double pred_period, double pred_step_size,
  const lanelet::projection::LocalFrameProjector & map_projector,
  const tf2::Quaternion & ned_in_map_rotation,
  rclcpp::node_interfaces::NodeClockInterface::SharedPtr node_clock)
{
  out_msg.dynamic_obj = true;  // If a PSM is sent then the object is dynamic,
                               // since it's a living thing
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::DYNAMIC_OBJ_PRESENCE;

  // Generate a unique object id from the PSM id
  out_msg.id = 0;
  for (int i = in_msg.id.id.size() - 1; i >= 0;
       i--) {  // using a signed index so an empty id yields no iterations
    // Each byte of the PSM id is placed in one byte of the object id.
    // This results in very large numbers which are unlikely to
    // conflict with standard detections.
    out_msg.id |= static_cast<uint32_t>(in_msg.id.id[i]) << (8 * i);
  }
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::ID_PRESENCE_VECTOR;
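  // Worked example (illustrative, not from the original source): a 4-byte PSM
  // id of {0x01, 0x02, 0x03, 0x04} packs little-endian into
  //   0x01 | (0x02 << 8) | (0x03 << 16) | (0x04 << 24) = 0x04030201.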

  // Additionally, store the id in the bsm_id field
  out_msg.bsm_id = in_msg.id.id;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::BSM_ID_PRESENCE_VECTOR;

  // Compute the timestamp

  auto psm_timestamp = impl::get_psm_timestamp(in_msg, node_clock->get_clock());
  out_msg.header.stamp = builtin_interfaces::msg::Time(psm_timestamp);
  out_msg.header.frame_id = map_frame_id;

  // Set the type

  if (
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PEDESTRIAN ||
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PUBLIC_SAFETY_WORKER ||
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::AN_ANIMAL) {
    // Treat animals like people since we have no internal class for them
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::PEDESTRIAN;

    // Default pedestrian size.
    // ExternalObject dimensions are half the actual size;
    // here we assume a 1.0 x 1.0 x 2.0 m pedestrian.
    out_msg.size.x = 0.5;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;

  } else if (
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PEDALCYCLIST) {
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::
      MOTORCYCLE;  // Currently ExternalObject cannot represent bicycles,
                   // but motorcycle is the next best choice

    // Default bicycle size
    out_msg.size.x = 1.0;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;
  } else {
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::UNKNOWN;

    // Default pedestrian size
    out_msg.size.x = 0.5;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;
  }
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::SIZE_PRESENCE_VECTOR;

  // Set the velocity

  out_msg.velocity.twist.linear.x = in_msg.speed.velocity;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::VELOCITY_PRESENCE_VECTOR;
  // NOTE: The velocity covariance is not provided in the PSM. To compute it,
  // at least two PSM messages are needed. Tracking and associating PSM
  // messages would increase the complexity of this conversion, which is not
  // warranted without an existing use case for the velocity covariance. If a
  // use case is presented, such an addition can be made at that time.

  // Compute the position covariance and confidence

  double largest_position_std = 0.0;
  double lat_variance = 0.0;
  double lon_variance = 0.0;
  double heading_variance = 0.0;

  // A standard deviation which is larger than the acceptable value to give a
  // 95% confidence interval on fitting the pedestrian within one 3.7 m lane
  constexpr double MAX_POSITION_STD = 1.85;
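  // Worked example (illustrative values, assumed for clarity): with a largest
  // reported standard deviation of 1.0 m, the confidence computed below is
  //   1.0 - min(1.0, 1.0 / 1.85) ~= 0.46,
  // and any standard deviation >= 1.85 m yields a confidence of 0.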

  if (
    (in_msg.accuracy.presence_vector &
     carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_AVAILABLE) &&
    (in_msg.accuracy.presence_vector &
     carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_ORIENTATION_AVAILABLE)) {
    // Both accuracies available

    // Compute the position covariance.
    // For computing confidence we will use the largest provided standard
    // deviation of position.
    largest_position_std = std::max(in_msg.accuracy.semi_major, in_msg.accuracy.semi_minor);
    lat_variance = in_msg.accuracy.semi_minor * in_msg.accuracy.semi_minor;
    lon_variance = in_msg.accuracy.semi_major * in_msg.accuracy.semi_major;
    heading_variance = in_msg.accuracy.orientation * in_msg.accuracy.orientation;

    // NOTE: ExternalObject.msg does not clearly define what is meant by
    // position confidence. Here we provide a linear scale based on the
    // positional accuracy, where 0 confidence denotes a standard deviation
    // larger than the acceptable value to give a 95% confidence interval on
    // fitting the pedestrian within one 3.7 m lane.
    // Without a way of getting the velocity confidence from the PSM, we use
    // the position confidence for both.
    out_msg.confidence = 1.0 - std::min(1.0, fabs(largest_position_std / MAX_POSITION_STD));
    out_msg.presence_vector |=
      carma_perception_msgs::msg::ExternalObject::CONFIDENCE_PRESENCE_VECTOR;
  } else if (
    in_msg.accuracy.presence_vector & carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_AVAILABLE) {
    // Only position accuracy available

    largest_position_std = std::max(in_msg.accuracy.semi_major, in_msg.accuracy.semi_minor);
    lat_variance = in_msg.accuracy.semi_minor * in_msg.accuracy.semi_minor;
    lon_variance = in_msg.accuracy.semi_major * in_msg.accuracy.semi_major;
    heading_variance = 1.0;  // Yaw variance is not available, so use a default

    // Same calculation as in the condition above; see that for a description
    out_msg.confidence = 1.0 - std::min(1.0, fabs(largest_position_std / MAX_POSITION_STD));
    out_msg.presence_vector |=
      carma_perception_msgs::msg::ExternalObject::CONFIDENCE_PRESENCE_VECTOR;

  } else if (
    in_msg.accuracy.presence_vector &
    carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_ORIENTATION_AVAILABLE) {
    // Only orientation accuracy available

    lat_variance = 1.0;
    lon_variance = 1.0;

    heading_variance = in_msg.accuracy.orientation * in_msg.accuracy.orientation;
  } else {  // No accuracies available
    lat_variance = 1.0;
    lon_variance = 1.0;
    heading_variance = 1.0;
  }

  // Compute the pose
  lanelet::GPSPoint gps_point(
    {in_msg.position.latitude, in_msg.position.longitude, in_msg.position.elevation});
  out_msg.pose = impl::pose_from_gnss(
    map_projector, ned_in_map_rotation, gps_point, in_msg.heading.heading, lat_variance,
    lon_variance, heading_variance);
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::POSE_PRESENCE_VECTOR;

  // Compute predictions.
  // If a path prediction is available we sample it;
  // if not, we assume linear motion.

  std::vector<geometry_msgs::msg::Pose> predicted_poses;

  if (in_msg.presence_vector & carma_v2x_msgs::msg::PSM::HAS_PATH_PREDICTION) {
    // Based on the vehicle frame used in J2735, positive radius should be to
    // the right and negative to the left
    predicted_poses = impl::sample_2d_path_from_radius(
      out_msg.pose.pose, out_msg.velocity.twist.linear.x,
      -in_msg.path_prediction.radius_of_curvature, pred_period, pred_step_size);
  } else {
    predicted_poses = impl::sample_2d_linear_motion(
      out_msg.pose.pose, out_msg.velocity.twist.linear.x, pred_period, pred_step_size);
  }

  out_msg.predictions = impl::predicted_poses_to_predicted_state(
    predicted_poses, out_msg.velocity.twist.linear.x, rclcpp::Time(out_msg.header.stamp),
    rclcpp::Duration::from_nanoseconds(pred_step_size * 1e9), map_frame_id, out_msg.confidence,
    out_msg.confidence);
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::PREDICTION_PRESENCE_VECTOR;
}
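
// Illustrative usage of convert() (a rough sketch; the node setup and the
// prediction parameters below are assumptions for this example, not taken
// from the original source):
//
//   carma_perception_msgs::msg::ExternalObject obj;
//   motion_computation::conversion::convert(
//     psm_msg, obj, "map",
//     2.0,   // pred_period: predict 2.0 s ahead
//     0.1,   // pred_step_size: one predicted state every 100 ms
//     map_projector, ned_in_map_rotation,
//     node->get_node_clock_interface());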

namespace impl
{
std::vector<geometry_msgs::msg::Pose> sample_2d_path_from_radius(
  const geometry_msgs::msg::Pose & pose, double velocity, double radius_of_curvature, double period,
  double step_size)
{
  std::vector<geometry_msgs::msg::Pose> output;
  output.reserve((period / step_size) + 1);

  tf2::Vector3 pose_in_map_translation(pose.position.x, pose.position.y, pose.position.z);
  tf2::Quaternion pose_in_map_quat(
    pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w);
  tf2::Transform pose_in_map(pose_in_map_quat, pose_in_map_translation);

  // The radius of curvature originates from the frame of the provided pose,
  // so the turning center is at (0, r)
  double center_x_in_pose = 0;
  double center_y_in_pose = radius_of_curvature;

  double total_dt = 0;

  while (total_dt < period) {
    // Compute the 2d position and orientation in the pose frame
    total_dt += step_size;
    double delta_arc_length = velocity * total_dt;  // Assumes perfect point motion along the curve

    double turning_angle = delta_arc_length / radius_of_curvature;
    double dx_from_center = radius_of_curvature * sin(turning_angle);
    // Offset from the turning center back to the sampled point; the negative
    // cosine keeps the first sample at the pose origin as turning_angle -> 0
    double dy_from_center = -radius_of_curvature * cos(turning_angle);

    double x = center_x_in_pose + dx_from_center;
    double y = center_y_in_pose + dy_from_center;
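
    // Worked example (illustrative numbers, assumed for clarity): with
    // r = 10 m, v = 5 m/s, and step_size = 0.1 s, the first sample has
    // arc length 0.5 m, turning_angle = 0.05 rad, and lands at roughly
    // (x, y) = (0.49979, 0.01250) in the pose frame.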

    tf2::Vector3 position(x, y, 0);

    tf2::Quaternion quat;
    quat.setRPY(0, 0, turning_angle);

    // Convert the position and orientation in the pose frame to the map frame
    tf2::Transform pose_to_sample(quat, position);
    tf2::Transform map_to_sample = pose_in_map * pose_to_sample;

    geometry_msgs::msg::Pose sample_pose;

    sample_pose.position.x = map_to_sample.getOrigin().x();
    sample_pose.position.y = map_to_sample.getOrigin().y();
    sample_pose.position.z = pose.position.z;  // Reuse the z position from the initial pose

    sample_pose.orientation.x = map_to_sample.getRotation().x();
    sample_pose.orientation.y = map_to_sample.getRotation().y();
    sample_pose.orientation.z = map_to_sample.getRotation().z();
    sample_pose.orientation.w = map_to_sample.getRotation().w();

    output.emplace_back(sample_pose);
  }

  return output;
}

std::vector<geometry_msgs::msg::Pose> sample_2d_linear_motion(
  const geometry_msgs::msg::Pose & pose, double velocity, double period, double step_size)
{
  std::vector<geometry_msgs::msg::Pose> output;
  output.reserve((period / step_size) + 1);

  tf2::Vector3 pose_in_map_translation(pose.position.x, pose.position.y, pose.position.z);
  tf2::Quaternion pose_in_map_quat(
    pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w);

  tf2::Transform pose_in_map(pose_in_map_quat, pose_in_map_translation);

  double total_dt = 0;

  while (total_dt < period) {
    // Increment time
    total_dt += step_size;
    double distance = velocity * total_dt;  // Total distance traveled
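
    // Worked example (illustrative numbers, assumed for clarity): with
    // v = 5 m/s, period = 2 s, and step_size = 0.1 s, this loop emits 20
    // samples spaced 0.5 m apart along the pose's local +x axis.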

    // Create a transform that moves forward along the local x-axis by 'distance'
    tf2::Transform pose_to_sample(tf2::Quaternion::getIdentity(), tf2::Vector3(distance, 0, 0));

    // Transform to map coordinates
    tf2::Transform map_to_sample = pose_in_map * pose_to_sample;

    geometry_msgs::msg::Pose sample_pose;

    sample_pose.position.x = map_to_sample.getOrigin().x();
    sample_pose.position.y = map_to_sample.getOrigin().y();
    sample_pose.position.z = map_to_sample.getOrigin().z();

    sample_pose.orientation.x = map_to_sample.getRotation().x();
    sample_pose.orientation.y = map_to_sample.getRotation().y();
    sample_pose.orientation.z = map_to_sample.getRotation().z();
    sample_pose.orientation.w = map_to_sample.getRotation().w();

    output.emplace_back(sample_pose);
  }

  return output;
}

// NOTE: heading will need to be set after calling this

geometry_msgs::msg::PoseWithCovariance pose_from_gnss(
  const lanelet::projection::LocalFrameProjector & projector,
  const tf2::Quaternion & ned_in_map_rotation, const lanelet::GPSPoint & gps_point,
  const double & heading, const double lat_variance, const double lon_variance,
  const double heading_variance)
{
  lanelet::BasicPoint3d map_point = projector.forward(gps_point);

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "map_point: " << map_point.x() << ", " << map_point.y() << ", " << map_point.z());

  if (
    fabs(map_point.x()) > 10000.0 ||
    fabs(map_point.y()) > 10000.0) {  // More than 10 km from the map origin, where earth curvature
                                      // will start to have a negative impact on system performance

    RCLCPP_WARN_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Distance from map origin is larger than supported by system "
      "assumptions. Strongly advise an "
      "alternative map origin be used.");
  }

  // This logic assumes that the orientation difference between an NED frame
  // located at the map origin and an NED frame located at the GNSS point is
  // sufficiently small that it can be ignored. It is therefore assumed that
  // the heading reported by the GNSS system, regardless of its position in
  // the map, would be the same for an unchanged orientation (as far as we
  // are concerned).

  tf2::Quaternion R_m_n(ned_in_map_rotation);  // Rotation of NED frame in map frame
  tf2::Quaternion R_n_h;                       // Rotation of sensor heading report in NED frame
  R_n_h.setRPY(0, 0, heading * wgs84_utils::DEG2RAD);

  tf2::Quaternion R_m_s = R_m_n * R_n_h;  // Rotation of sensor in map frame under the assumption
                                          // that the distance from the map origin is sufficiently
                                          // small so as to ignore local changes in NED orientation

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_m_n (x,y,z,w) : ( " << R_m_n.x() << ", " << R_m_n.y() << ", " << R_m_n.z() << ", "
                           << R_m_n.w() << ")");

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_n_h (x,y,z,w) : ( " << R_n_h.x() << ", " << R_n_h.y() << ", " << R_n_h.z() << ", "
                           << R_n_h.w() << ")");

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_m_s (x,y,z,w) : ( " << R_m_s.x() << ", " << R_m_s.y() << ", " << R_m_s.z() << ", "
                           << R_m_s.w() << ")");

  tf2::Transform T_m_s(
    R_m_s, tf2::Vector3(
             map_point.x(), map_point.y(),
             map_point.z()));  // Reported position and orientation
                               // of sensor frame in map frame

  tf2::Transform T_m_n_no_heading(
    R_m_n, tf2::Vector3(0, 0, 0));  // Used to transform the covariance

  // This covariance represents the covariance of the NED frame (N-lat, E-lon,
  // heading-angle east of north). This means that the covariance is in the
  // NED frame and needs to be transformed to the map frame.
  // clang-format off
  std::array<double, 36> input_covariance = {
    lat_variance, 0, 0, 0, 0, 0,
    0, lon_variance, 0, 0, 0, 0,
    0, 0, 1, 0, 0, 0,
    0, 0, 0, 1, 0, 0,
    0, 0, 0, 0, 1, 0,
    0, 0, 0, 0, 0, heading_variance
  };
  // clang-format on
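
  // Note on ordering (descriptive comment, added for clarity): the diagonal
  // follows the ROS covariance convention (x, y, z, roll, pitch, yaw). In the
  // NED frame, north (lat) lies along the first axis and east (lon) along the
  // second, which is why the heading-free NED-to-map transform below is used
  // to rotate this covariance into the map frame.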

  std::array<double, 36> new_cov = tf2::transformCovariance(
    input_covariance,
    // Per the usage of transformCovariance in tf2_geometry_msgs,
    // this frame should be the transform between the frame which the pose
    // is in and the target frame.
    T_m_n_no_heading);

  // Populate message
  geometry_msgs::msg::PoseWithCovariance pose;
  pose.pose.position.x = T_m_s.getOrigin().getX();
  pose.pose.position.y = T_m_s.getOrigin().getY();
  pose.pose.position.z = T_m_s.getOrigin().getZ();

  pose.pose.orientation.x = T_m_s.getRotation().getX();
  pose.pose.orientation.y = T_m_s.getRotation().getY();
  pose.pose.orientation.z = T_m_s.getRotation().getZ();
  pose.pose.orientation.w = T_m_s.getRotation().getW();

  pose.covariance = new_cov;

  return pose;
}
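
// Illustrative check (values assumed for this example, not taken from the
// original source): for an identity ned_in_map_rotation, a heading of 90
// degrees east of north gives R_n_h = yaw(pi/2), so the returned orientation
// is a pure pi/2 yaw in the map frame at the projected map_point.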

std::vector<carma_perception_msgs::msg::PredictedState> predicted_poses_to_predicted_state(
  const std::vector<geometry_msgs::msg::Pose> & poses, double constant_velocity,
  const rclcpp::Time & start_time, const rclcpp::Duration & step_size, const std::string & frame,
  double initial_pose_confidence, double initial_vel_confidence)
{
  std::vector<carma_perception_msgs::msg::PredictedState> output;
  output.reserve(poses.size());

  rclcpp::Time time(start_time);

  for (auto p : poses) {
    time += step_size;
    carma_perception_msgs::msg::PredictedState pred_state;
    pred_state.header.stamp = time;
    pred_state.header.frame_id = frame;

    pred_state.predicted_position = p;
    pred_state.predicted_position_confidence =
      0.9 * initial_pose_confidence;  // Reduce confidence by 10% per timestep
    initial_pose_confidence = pred_state.predicted_position_confidence;
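
    // Decay example (illustrative, assumed for clarity): an initial
    // confidence of 1.0 becomes 0.9, 0.81, 0.729, ... on successive
    // predictions, i.e. 0.9^k after k steps.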

    pred_state.predicted_velocity.linear.x = constant_velocity;
    pred_state.predicted_velocity_confidence =
      0.9 * initial_vel_confidence;  // Reduce confidence by 10% per timestep
    initial_vel_confidence = pred_state.predicted_velocity_confidence;

    output.push_back(pred_state);
  }

  return output;
}

rclcpp::Time get_psm_timestamp(
  const carma_v2x_msgs::msg::PSM & in_msg, rclcpp::Clock::SharedPtr clock)
{
  boost::posix_time::ptime utc_time_of_current_psm;

  // Get the UTC epoch start time
  static const boost::posix_time::ptime inception_boost(
    boost::posix_time::time_from_string("1970-01-01 00:00:00.000"));

  // Determine if the UTC time of the path history can be used instead of the
  // sec_mark. The sec_mark is susceptible to large error on minute transitions
  // due to the missing "minute of the year" field. If the second mark in the
  // path history is identical and the full UTC time is provided with ms
  // resolution, then it can be assumed the initial_position is the same as the
  // PSM data and the utc_time can be used instead of sec_mark.
  // clang-format off

  const auto initial_position_time{in_msg.path_history.initial_position.utc_time};
  if ((in_msg.presence_vector &
       carma_v2x_msgs::msg::PSM::HAS_PATH_HISTORY) &&
      (in_msg.path_history.presence_vector &
       carma_v2x_msgs::msg::PathHistory::HAS_INITIAL_POSITION) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::YEAR) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::MONTH) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::DAY) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::HOUR) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::MINUTE) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::SECOND) &&
      (in_msg.sec_mark.millisecond == initial_position_time.second.millisecond)) {
    // clang-format on
    RCLCPP_DEBUG_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Using UTC time of path history to determine PSM "
      "timestamp. Assumed valid since UTC is fully specified "
      "and sec_mark == utc_time.seconds in this message.");

    boost::posix_time::time_duration time_of_day =
      boost::posix_time::hours(in_msg.path_history.initial_position.utc_time.hour.hour) +
      boost::posix_time::minutes(in_msg.path_history.initial_position.utc_time.minute.minute) +
      boost::posix_time::milliseconds(
        in_msg.path_history.initial_position.utc_time.second.millisecond);

    boost::gregorian::date utc_day(
      in_msg.path_history.initial_position.utc_time.year.year,
      in_msg.path_history.initial_position.utc_time.month.month,
      in_msg.path_history.initial_position.utc_time.day.day);

    utc_time_of_current_psm = boost::posix_time::ptime(utc_day) + time_of_day;
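
    // Worked example (illustrative values, assumed for clarity): a fully
    // specified utc_time of 2023-03-01 14:32 with second.millisecond = 45200
    // stamps this PSM at 2023-03-01 14:32:45.200 UTC.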
  } else {  // If the UTC time of the path history cannot be used to account
            // for minute change over, then we have to default to the sec_mark

    RCLCPP_WARN_STREAM_THROTTLE(
      rclcpp::get_logger("motion_computation::conversion"), *clock.get(),
      rclcpp::Duration(5, 0).nanoseconds(),
      "PSM PathHistory UTC timestamp does not match "
      "sec_mark. Unable to determine the minute of "
      "the year used for PSM data. Assuming local "
      "clock is exactly synced. This is NOT "
      "ADVISED.");

    // Get the current ROS time
    auto current_time = clock->now();

    // Convert the ROS time to a boost duration
    boost::posix_time::time_duration duration_since_inception(
      lanelet::time::durationFromSec(current_time.seconds()));

    // Get the current ROS time in UTC
    auto curr_time_boost = inception_boost + duration_since_inception;

    // Get duration of the current day
    auto duration_in_day_till_current_time = curr_time_boost.time_of_day();

    // Extract hours and minutes
    auto hour_count_in_day = duration_in_day_till_current_time.hours();
    auto minute_count_in_hour = duration_in_day_till_current_time.minutes();

    // Get the duration of the minute in the day
    auto start_of_minute_in_day = boost::posix_time::hours(hour_count_in_day) +
                                  boost::posix_time::minutes(minute_count_in_hour);

    // Get the start of the day in ROS time
    boost::posix_time::ptime start_of_day(curr_time_boost.date());

    // Get the start of the current minute in ROS time
    boost::posix_time::ptime utc_start_of_current_minute = start_of_day + start_of_minute_in_day;

    // Compute the UTC PSM stamp from the sec_mark using ROS time as the clock

    boost::posix_time::time_duration s_in_cur_minute =
      boost::posix_time::milliseconds(in_msg.sec_mark.millisecond);

    utc_time_of_current_psm = utc_start_of_current_minute + s_in_cur_minute;
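
    // Worked example (illustrative values, assumed for clarity): if the local
    // ROS clock reads 14:32:58.000 UTC and sec_mark.millisecond = 45200, the
    // PSM is stamped at 14:32:45.200 UTC, since sec_mark encodes milliseconds
    // elapsed within the current minute.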
  }

  boost::posix_time::time_duration nsec_since_epoch = utc_time_of_current_psm - inception_boost;

  if (nsec_since_epoch.is_special()) {
    RCLCPP_ERROR_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Computed PSM nsec_since_epoch is special (computation "
      "failed). Value effectively undefined.");
  }

  return rclcpp::Time(nsec_since_epoch.total_nanoseconds());
}
}  // namespace impl

}  // namespace conversion
}  // namespace motion_computation