Carma-platform v4.10.0
CARMA Platform is built on the Robot Operating System (ROS) and uses open-source software (OSS) to enable Cooperative Driving Automation (CDA) features, allowing Automated Driving Systems to interact and cooperate with infrastructure and other vehicles through communication.
msg_conversion.cpp
1// Copyright 2023 Leidos
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
16
17#include <carma_cooperative_perception_interfaces/msg/track.hpp>
18#include <carma_cooperative_perception_interfaces/msg/track_list.hpp>
19#include <carma_perception_msgs/msg/external_object.hpp>
20#include <carma_perception_msgs/msg/external_object_list.hpp>
21#include <j2735_v2x_msgs/msg/d_date_time.hpp>
22#include <j2735_v2x_msgs/msg/personal_device_user_type.hpp>
23#include <j2735_v2x_msgs/msg/positional_accuracy.hpp>
24#include <j2735_v2x_msgs/to_floating_point.hpp>
25#include <j3224_v2x_msgs/msg/detected_object_data.hpp>
26#include <j3224_v2x_msgs/msg/equipment_type.hpp>
27#include <j3224_v2x_msgs/msg/measurement_time_offset.hpp>
28#include <j3224_v2x_msgs/msg/object_type.hpp>
29#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
30
31#include <algorithm>
32#include <cctype>
33#include <charconv>
34#include <chrono>
35#include <cmath>
36#include <limits>
37#include <numeric>
38#include <string>
39#include <utility>
40#include <cstdlib> // for setenv, unsetenv
41#include <ctime> // for tzset
42#include <fmt/format.h>
43
44#include <proj.h>
45#include <gsl/pointers>
46#include <memory>
47
52
53#include <lanelet2_core/geometry/Lanelet.h>
54#include <lanelet2_extension/projection/local_frame_projector.h>
55#include <boost/date_time/posix_time/conversion.hpp>
56#include <boost/date_time/posix_time/posix_time_io.hpp>
57#include "boost/date_time/posix_time/posix_time.hpp"
58
59namespace carma_cooperative_perception
60{
61auto to_time_msg(const DDateTime & d_date_time, bool is_simulation) -> builtin_interfaces::msg::Time
62{
63 // Convert DDateTime to builtin_interfaces::msg::Time
64 builtin_interfaces::msg::Time msg;
65 if (!is_simulation) {
66 // Create a tm structure to hold the date and time components
67 std::tm timeinfo = {};
68
69 // Year
70 if (d_date_time.year){
71 if(remove_units(d_date_time.year.value()) >= 1970){
72 // std::tm is counted since 1900
73 timeinfo.tm_year = static_cast<int>(remove_units(d_date_time.year.value())) - 1900;
74 }
75 else{
76 throw std::invalid_argument(
77 "Year must be greater than 1970 for live date/time conversion");
78 }
79 }
80
81 // Month
82 if (d_date_time.month && static_cast<int>(d_date_time.month.value().get_value()) != 0)
83 {
84 // std::tm is counted from 0 to 11, J2735 is counted from 1 to 12
85 timeinfo.tm_mon = static_cast<int>(d_date_time.month.value().get_value()) - 1;
86 }
87
88 // Day
89 if (d_date_time.day && static_cast<int>(d_date_time.day.value()) != 0)
90 {
91 // Day is counted from 1 to 31 in both std::tm and J2735
92 timeinfo.tm_mday = static_cast<int>(d_date_time.day.value());
93 }
94 else{
95 timeinfo.tm_mday = 1; // Default to 1 if day is not provided as C++ initializes to 0
96 }
97
98 // Hour
99 if (d_date_time.hour && static_cast<int>(d_date_time.hour.value()) != 31)
100 {
101 // Hour is counted from 0 to 23 in both std::tm and J2735
102 timeinfo.tm_hour = static_cast<int>(d_date_time.hour.value());
103 }
104
105 // Minute
106 if (d_date_time.minute && static_cast<int>(d_date_time.minute.value()) != 60)
107 {
108 // Minute is counted from 0 to 59 in both std::tm and J2735
109 timeinfo.tm_min = static_cast<int>(d_date_time.minute.value());
110 }
111 // Set seconds field (which actually uses ms in j2735) to 0
112 // for now and add milliseconds later
113 timeinfo.tm_sec = 0;
114
115 std::time_t timeT; // Integer number of seconds since the Unix epoch
116
117 // Treat the time as UTC to adjust the timezone offset later
118 // which also works if no timezone offset is provided because SAE J3224 SDSM is in UTC
119 timeinfo.tm_gmtoff = 0; // Set to 0 for UTC
120 timeinfo.tm_isdst = 0; // UTC doesn't observe DST
121 std::time_t utc_time = timegm(&timeinfo); // timegm() is available on GNU-based systems (e.g., Ubuntu)
122
123 if (d_date_time.time_zone_offset) {
124 // Offset is in minutes, convert to seconds
125 // If offset is negative (e.g., -5 for EST), we add 5 hours to get UTC
126 // If offset is positive (e.g., +9 for JST), we subtract 9 hours to get UTC
127 int offset_seconds = static_cast<int>(d_date_time.time_zone_offset.value()) * 60;
128 timeT = utc_time - offset_seconds; // Subtract offset to get UTC
129 } else {
130 // No timezone offset provided, assume input is already UTC
131 timeT = utc_time;
132 }
133
134 // Convert time_t to system_clock::time_point
135 auto timePoint = std::chrono::system_clock::from_time_t(timeT);
136
137 // Add milliseconds
138 int milliseconds = 0;
139 if (d_date_time.second)
140 {
141 milliseconds = static_cast<int>(d_date_time.second.value());
142 }
143 timePoint += std::chrono::milliseconds(milliseconds);
144
145 // Extract seconds and nanoseconds since epoch
146 auto duration = timePoint.time_since_epoch();
147 auto seconds = std::chrono::duration_cast<std::chrono::seconds>(duration);
148 auto nanoseconds = std::chrono::duration_cast<std::chrono::nanoseconds>(duration - seconds);
149
150 msg.sec = static_cast<int32_t>(seconds.count());
151 msg.nanosec = static_cast<uint32_t>(nanoseconds.count());
152
153 return msg;
154 }
155
156 // In simulation, ignore the year/month/day because a simulation run will not span that long
157 double seconds;
158 const auto fractional_secs{std::modf(
159 remove_units(units::time::second_t{d_date_time.hour.value_or(units::time::second_t{0.0})}) +
160 remove_units(units::time::second_t{d_date_time.minute.value_or(units::time::second_t{0.0})}) +
161 remove_units(units::time::second_t{d_date_time.second.value_or(units::time::second_t{0.0})}),
162 &seconds)};
163
164 msg.sec = static_cast<std::int32_t>(seconds);
165 msg.nanosec = static_cast<std::uint32_t>(fractional_secs * 1e9);
166
167 return msg;
168}
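As a quick sanity check of the two branches above, here is a worked example in comment form. The input values are illustrative assumptions, and the hour/minute fields are assumed to carry hour- and minute-unit quantities, as the units:: conversions in the simulation branch imply:

// Hypothetical DDateTime input: year 2023, month 6, day 15, hour 12, minute 30, second 500 (ms)
//
// is_simulation == false:
//   timegm() on 2023-06-15 12:30:00 UTC -> 1686832200 s; the 500 ms become nanoseconds:
//   msg.sec = 1686832200, msg.nanosec = 500000000
//
// is_simulation == true:
//   the date is ignored; hour + minute + second are converted to seconds and summed
//   (12 h + 30 min + 0.5 s = 45000.5 s):
//   msg.sec = 45000, msg.nanosec = 500000000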
169
170auto calc_detection_time_stamp(DDateTime sdsm_time, const MeasurementTimeOffset & offset)
171 -> DDateTime
172{
173 sdsm_time.second.value() += offset.measurement_time_offset;
174
175 return sdsm_time;
176}
177
178auto ned_to_enu(const PositionOffsetXYZ & offset_ned) noexcept
179{
180 auto offset_enu{offset_ned};
181
182 // NED to ENU: swap x and y axis and negate z axis
183 offset_enu.offset_x = offset_ned.offset_y;
184 offset_enu.offset_y = offset_ned.offset_x;
185
186 if (offset_enu.offset_z) {
187 offset_enu.offset_z.value() *= -1;
188 }
189
190 return offset_enu;
191}
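A small illustration of the axis swap and sign flip above (example values only):

// NED offset: offset_x = 10 m (north), offset_y =  5 m (east),  offset_z =  2 m (down)
// ENU offset: offset_x =  5 m (east),  offset_y = 10 m (north), offset_z = -2 m (up)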
192
193auto to_ddate_time_msg(const builtin_interfaces::msg::Time & builtin_time)
194 -> j2735_v2x_msgs::msg::DDateTime
195{
196 j2735_v2x_msgs::msg::DDateTime ddate_time_output;
197
198 // Add the time components from epoch seconds
199 boost::posix_time::ptime posix_time = boost::posix_time::from_time_t(builtin_time.sec) +
200 boost::posix_time::nanosec(builtin_time.nanosec);
201
202 const auto time_stamp_year = posix_time.date().year();
203 const auto time_stamp_month = posix_time.date().month();
204 const auto time_stamp_day = posix_time.date().day();
205
206 const auto hours_of_day = posix_time.time_of_day().hours();
207 const auto minutes_of_hour = posix_time.time_of_day().minutes();
208 const auto seconds_of_minute = posix_time.time_of_day().seconds();
209
210 ddate_time_output.presence_vector = 0;
211
212 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::YEAR;
213 ddate_time_output.year.year = time_stamp_year;
214 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::MONTH;
215 ddate_time_output.month.month = time_stamp_month;
216 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::DAY;
217 ddate_time_output.day.day = time_stamp_day;
218 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::HOUR;
219 ddate_time_output.hour.hour = hours_of_day;
220 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::MINUTE;
221 ddate_time_output.minute.minute = minutes_of_hour;
222 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::SECOND;
223 ddate_time_output.second.millisecond = seconds_of_minute;
224
225 return ddate_time_output;
226}
227
228auto calc_sdsm_time_offset(
229 const builtin_interfaces::msg::Time & external_object_list_stamp,
230 const builtin_interfaces::msg::Time & external_object_stamp)
231 -> carma_v2x_msgs::msg::MeasurementTimeOffset
232{
233 carma_v2x_msgs::msg::MeasurementTimeOffset time_offset;
234
235 boost::posix_time::ptime external_object_list_time =
236 boost::posix_time::from_time_t(external_object_list_stamp.sec) +
237 boost::posix_time::nanosec(external_object_list_stamp.nanosec);
238
239 boost::posix_time::ptime external_object_time =
240 boost::posix_time::from_time_t(external_object_stamp.sec) +
241 boost::posix_time::nanosec(external_object_stamp.nanosec);
242
243 boost::posix_time::time_duration offset_duration =
244 (external_object_list_time - external_object_time);
245
246 time_offset.measurement_time_offset = offset_duration.total_seconds();
247
248 return time_offset;
249}
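Note that boost::posix_time::time_duration::total_seconds() returns whole seconds, so sub-second offsets are truncated. A worked example with illustrative stamps:

// list stamp   = {sec: 10, nanosec: 750000000}   (t = 10.75 s)
// object stamp = {sec: 10, nanosec: 250000000}   (t = 10.25 s)
// offset_duration = 0.5 s  ->  measurement_time_offset = total_seconds() = 0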
250
251auto to_position_msg(const UtmCoordinate & position_utm) -> geometry_msgs::msg::Point
252{
253 geometry_msgs::msg::Point msg;
254
255 msg.x = remove_units(position_utm.easting);
256 msg.y = remove_units(position_utm.northing);
257 msg.z = remove_units(position_utm.elevation);
258
259 return msg;
260}
261
262auto to_position_msg(const MapCoordinate & position_map) -> geometry_msgs::msg::Point
263{
264 geometry_msgs::msg::Point msg;
265
266 msg.x = remove_units(position_map.easting);
267 msg.y = remove_units(position_map.northing);
268 msg.z = remove_units(position_map.elevation);
269
270 return msg;
271}
272
273auto heading_to_enu_yaw(const units::angle::degree_t & heading) -> units::angle::degree_t
274{
275 return units::angle::degree_t{std::fmod(-(remove_units(heading) - 90.0) + 360.0, 360.0)};
276}
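Spot checks of the formula above, which maps a compass heading (degrees clockwise from north) to an ENU yaw (degrees counter-clockwise from east):

// heading   0 deg (north) -> yaw  90 deg
// heading  90 deg (east)  -> yaw   0 deg
// heading 180 deg (south) -> yaw 270 deg
// heading 270 deg (west)  -> yaw 180 deg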
277
278auto enu_orientation_to_true_heading(
279 double yaw, const lanelet::BasicPoint3d & obj_pose,
280 const std::shared_ptr<lanelet::projection::LocalFrameProjector> & map_projection)
281 -> units::angle::degree_t
282{
283 // Get object geodetic position
284 lanelet::GPSPoint wgs_obj_pose = map_projection->reverse(obj_pose);
285
286 // Get WGS84 Heading
287 gsl::owner<PJ_CONTEXT *> context = proj_context_create();
288 gsl::owner<PJ *> transform = proj_create(context, map_projection->ECEF_PROJ_STR);
289 units::angle::degree_t grid_heading{std::fmod(90 - yaw + 360, 360)};
290
291 const auto factors = proj_factors(
292 transform, proj_coord(proj_torad(wgs_obj_pose.lon), proj_torad(wgs_obj_pose.lat), 0, 0));
293 units::angle::degree_t grid_convergence{proj_todeg(factors.meridian_convergence)};
294
295 auto wgs_heading = grid_convergence + grid_heading;
296
297 proj_destroy(transform);
298 proj_context_destroy(context);
299
300 return wgs_heading;
301}
302
303// Determine the object's position offset (in meters) from the current reference pose
304// (in the map frame) and the external object's pose
305auto calc_relative_position(
306 const geometry_msgs::msg::PoseStamped & source_pose,
307 const carma_v2x_msgs::msg::PositionOffsetXYZ & position_offset)
308 -> carma_v2x_msgs::msg::PositionOffsetXYZ
309{
310 carma_v2x_msgs::msg::PositionOffsetXYZ adjusted_offset;
311
312 adjusted_offset.offset_x.object_distance =
313 position_offset.offset_x.object_distance - source_pose.pose.position.x;
314 adjusted_offset.offset_y.object_distance =
315 position_offset.offset_y.object_distance - source_pose.pose.position.y;
316 adjusted_offset.offset_z.object_distance =
317 position_offset.offset_z.object_distance - source_pose.pose.position.z;
318 adjusted_offset.presence_vector = carma_v2x_msgs::msg::PositionOffsetXYZ::HAS_OFFSET_Z;
319
320 return adjusted_offset;
321}
322
323auto transform_pose_from_map_to_wgs84(
324 const geometry_msgs::msg::PoseStamped & source_pose,
325 const std::shared_ptr<lanelet::projection::LocalFrameProjector> & map_projection)
326 -> carma_v2x_msgs::msg::Position3D
327{
328 carma_v2x_msgs::msg::Position3D ref_pos;
329 lanelet::BasicPoint3d source_pose_basicpoint{
330 source_pose.pose.position.x, source_pose.pose.position.y, 0.0};
331
332 lanelet::GPSPoint wgs84_ref_pose = map_projection->reverse(source_pose_basicpoint);
333
334 ref_pos.longitude = wgs84_ref_pose.lon;
335 ref_pos.latitude = wgs84_ref_pose.lat;
336 ref_pos.elevation = wgs84_ref_pose.ele;
337 ref_pos.elevation_exists = true;
338
339 return ref_pos;
340}
341
342// Helper function to convert a vector of uint8_t to a hex string
343// TemporaryID and octet string terms come from the SAE J2735 message definitions
344std::string to_string(const std::vector<std::uint8_t> & temporary_id) {
345 std::string str;
346 str.reserve(2 * std::size(temporary_id)); // Two hex characters per octet string
347
348 std::array<char, 2> buffer;
349 for (const auto & octet_string : temporary_id) {
350 std::to_chars(std::begin(buffer), std::end(buffer), octet_string, 16);
351 str.push_back(std::toupper(std::get<0>(buffer)));
352 str.push_back(std::toupper(std::get<1>(buffer)));
353 }
354
355 return str;
356};
357
358// Helper function to map the J3224 ObjectType to the CARMA Detection's motion model and semantic class
359void convert_object_type(carma_cooperative_perception_interfaces::msg::Detection& detection,
360 const j3224_v2x_msgs::msg::ObjectType& j3224_obj_type)
361{
362 switch (j3224_obj_type.object_type) {
363 case j3224_obj_type.ANIMAL:
364 detection.motion_model = detection.MOTION_MODEL_CTRV;
365 // We don't have a good semantic class mapping for animals
366 detection.semantic_class = detection.SEMANTIC_CLASS_UNKNOWN;
367 break;
368 case j3224_obj_type.VRU:
369 detection.motion_model = detection.MOTION_MODEL_CTRV;
370 detection.semantic_class = detection.SEMANTIC_CLASS_PEDESTRIAN;
371 break;
372 case j3224_obj_type.VEHICLE:
373 detection.motion_model = detection.MOTION_MODEL_CTRV;
374 detection.semantic_class = detection.SEMANTIC_CLASS_SMALL_VEHICLE;
375 break;
376 default:
377 detection.motion_model = detection.MOTION_MODEL_CTRV;
378 detection.semantic_class = detection.SEMANTIC_CLASS_UNKNOWN;
379 }
380}
381// Helper function to fill all covariance-related values from J3224 confidence values
382void convert_covariances(carma_cooperative_perception_interfaces::msg::Detection& detection,
383 const carma_v2x_msgs::msg::DetectedObjectCommonData& common_data,
384 const std::optional<SdsmToDetectionListConfig>& conversion_adjustment)
385{
386 // Variables to store original covariance values for debugging
387 double original_pose_covariance_x = 0.0;
388 double original_pose_covariance_y = 0.0;
389 double original_pose_covariance_z = 0.0;
390 double original_pose_covariance_yaw = 0.0;
391 double original_twist_covariance_x = 0.0;
392 double original_twist_covariance_z = 0.0;
393 double original_twist_covariance_yaw = 0.0;
394
395 // Calculate and log original pose covariance values
396 try {
397 original_pose_covariance_x =
398 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.pos_confidence.pos).value(), 2);
399 original_pose_covariance_y = original_pose_covariance_x;
400
401 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
402 "Original pose covariance X/Y: " << original_pose_covariance_x);
403 } catch (const std::bad_optional_access &) {
404 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
405 "Missing position confidence");
406 }
407
408 try {
409 original_pose_covariance_z =
410 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.pos_confidence.elevation).value(), 2);
411
412 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
413 "Original pose covariance Z: " << original_pose_covariance_z);
414 } catch (const std::bad_optional_access &) {
415 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
416 "Missing elevation confidence");
417 }
418
419 try {
420 // Get original heading/yaw covariance
421 original_pose_covariance_yaw =
422 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.heading_conf).value(), 2);
423
424 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
425 "Original pose covariance yaw: " << original_pose_covariance_yaw);
426 } catch (const std::bad_optional_access &) {
427 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
428 "Missing heading confidence");
429 }
430
431 try {
432 // Get original linear x velocity covariance
433 original_twist_covariance_x =
434 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.speed_confidence).value(), 2);
435
436 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
437 "Original twist covariance X: " << original_twist_covariance_x);
438 } catch (const std::bad_optional_access &) {
439 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
440 "Missing speed confidence");
441 }
442
443 if (!common_data.speed_z.unavailable){
444 try {
445 // Get original linear z velocity covariance
446 original_twist_covariance_z =
447 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.speed_confidence_z).value(), 2);
448
449 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
450 "Original twist covariance Z: " << original_twist_covariance_z);
451 } catch (const std::bad_optional_access &) {
452 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
453 "Missing z-speed confidence");
454 }
455 }
456 else{
457 original_twist_covariance_z = 0.0;
458 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
459 "Original twist covariance Z: 0.0 (speed_z not provided)");
460 }
461
462 // Having non-zero value means available
463 if(static_cast<bool>(common_data.accel_4_way.yaw_rate)){
464 try {
465 // Get original angular z velocity (yaw rate) covariance
466 original_twist_covariance_yaw =
467 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.acc_cfd_yaw).value(), 2);
468
469 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
470 "Original twist covariance yaw: " << original_twist_covariance_yaw);
471 } catch (const std::bad_optional_access &) {
472 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
473 "Missing yaw-rate confidence");
474 }
475 }
476 else{
477 original_twist_covariance_yaw = 0.0;
478 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
479 "Original twist covariance yaw: 0.0 (yaw_rate not provided)");
480 }
481
482 if (conversion_adjustment && conversion_adjustment.value().overwrite_covariance)
483 {
484 // Hardcoded pose covariance
485 detection.pose.covariance[0] = conversion_adjustment.value().pose_covariance_x;
486 detection.pose.covariance[7] = conversion_adjustment.value().pose_covariance_y;
487 detection.pose.covariance[14] = conversion_adjustment.value().pose_covariance_z;
488 detection.pose.covariance[35] = conversion_adjustment.value().pose_covariance_yaw;
489
490 // Hardcoded twist covariance
491 detection.twist.covariance[0] = conversion_adjustment.value().twist_covariance_x;
492 detection.twist.covariance[14] = conversion_adjustment.value().twist_covariance_z;
493 detection.twist.covariance[35] = conversion_adjustment.value().twist_covariance_yaw;
494
495 // Print comparison between original and hardcoded values
496 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
497 "POSE COVARIANCE COMPARISON - Original vs Hardcoded: " <<
498 "X: " << original_pose_covariance_x << " -> " << detection.pose.covariance[0] << ", " <<
499 "Y: " << original_pose_covariance_y << " -> " << detection.pose.covariance[7] << ", " <<
500 "Z: " << original_pose_covariance_z << " -> " << detection.pose.covariance[14] << ", " <<
501 "Yaw: " << original_pose_covariance_yaw << " -> " << detection.pose.covariance[35]);
502
503 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
504 "TWIST COVARIANCE COMPARISON - Original vs Hardcoded: " <<
505 "X: " << original_twist_covariance_x << " -> , " << detection.twist.covariance[0] << ", " <<
506 "Z: " << original_twist_covariance_z << " -> , " << detection.twist.covariance[14] << ", " <<
507 "Yaw: " << original_twist_covariance_yaw << " -> " << detection.twist.covariance[35]);
508 }
509 else
510 {
511 // Original pose covariance
512 detection.pose.covariance[0] = original_pose_covariance_x;
513 detection.pose.covariance[7] = original_pose_covariance_y;
514 detection.pose.covariance[14] = original_pose_covariance_z;
515 detection.pose.covariance[35] = original_pose_covariance_yaw;
516
517 // Original twist covariance
518 detection.twist.covariance[0] = original_twist_covariance_x;
519 detection.twist.covariance[14] = original_twist_covariance_z;
520 detection.twist.covariance[35] = original_twist_covariance_yaw;
521 }
522
523 // Fill zeros for all other twist covariance values
524 for (size_t i = 0; i < 36; ++i) {
525 if (i != 0 && i != 14 && i != 35) {
526 detection.twist.covariance[i] = 0.0;
527 }
528 }
529
530 // Fill zeros for all other pose covariance values
531 for (size_t i = 0; i < 36; ++i) {
532 if (i != 0 && i != 7 && i != 14 && i != 35) {
533 detection.pose.covariance[i] = 0.0;
534 }
535 }
536}
566auto to_detection_list_msg(
567 const carma_v2x_msgs::msg::SensorDataSharingMessage & sdsm, std::string_view georeference,
568 bool is_simulation, const std::optional<SdsmToDetectionListConfig>& conversion_adjustment)
569 -> carma_cooperative_perception_interfaces::msg::DetectionList
570{
571 carma_cooperative_perception_interfaces::msg::DetectionList detection_list;
572 try{
573
574 const auto ref_pos_3d{Position3D::from_msg(sdsm.ref_pos)};
575
576 units::length::meter_t elevation(0.0);
577 if(ref_pos_3d.elevation){
578 elevation = ref_pos_3d.elevation.value();
579 }
580 const Wgs84Coordinate ref_pos_wgs84{
581 ref_pos_3d.latitude, ref_pos_3d.longitude, elevation};
582
583 const auto ref_pos_map{project_to_carma_map(ref_pos_wgs84, georeference)};
584
585 for (const auto & object_data : sdsm.objects.detected_object_data) {
586 const auto common_data{object_data.detected_object_common_data};
587
588 carma_cooperative_perception_interfaces::msg::Detection detection;
589 detection.header.frame_id = "map";
590
591 const auto detection_time{calc_detection_time_stamp(
592 DDateTime::from_msg(sdsm.sdsm_time_stamp),
593 MeasurementTimeOffset::from_msg(common_data.measurement_time))};
594
595 detection.header.stamp = to_time_msg(detection_time, is_simulation);
596
597 detection.id = fmt::format("{}-{}",
598 to_string(sdsm.source_id.id),
599 common_data.detected_id.object_id);
600
601 const auto pos_offset_enu{ned_to_enu(PositionOffsetXYZ::from_msg(common_data.pos))};
602 detection.pose.pose.position = to_position_msg(MapCoordinate{
603 ref_pos_map.easting + pos_offset_enu.offset_x, ref_pos_map.northing + pos_offset_enu.offset_y,
604 ref_pos_map.elevation + pos_offset_enu.offset_z.value_or(units::length::meter_t{0.0})});
605
606 // Adjust object's position to match vector map coordinates as sensor calibrations are not
607 // always reliable
608 if (conversion_adjustment && conversion_adjustment.value().adjust_pose)
609 {
610 detection.pose.pose.position.x += conversion_adjustment.value().x_offset;
611 detection.pose.pose.position.y += conversion_adjustment.value().y_offset;
612 }
613
614 const auto true_heading{units::angle::degree_t{Heading::from_msg(common_data.heading).heading}};
615
616 // Note: This should really use the detection's WGS-84 position, so the
617 // convergence will be off slightly. TODO
618 const units::angle::degree_t grid_convergence{
619 calculate_grid_convergence(ref_pos_wgs84, georeference)};
620
621 const auto grid_heading{true_heading - grid_convergence};
622 const auto enu_yaw{heading_to_enu_yaw(grid_heading)};
623
624 tf2::Quaternion quat_tf;
625
626 if (conversion_adjustment && conversion_adjustment.value().adjust_pose)
627 {
628 // Adjust object's heading to match vector map coordinates as sensor calibrations are not
629 // always reliable
630 auto yaw_with_offset = units::angle::radian_t{enu_yaw} +
631 units::angle::radian_t{units::angle::degree_t{conversion_adjustment.value().yaw_offset}};
632 auto new_yaw = std::fmod(remove_units(yaw_with_offset) + 2 * M_PI, 2 * M_PI);
633 quat_tf.setRPY(0, 0, new_yaw);
634 }
635 else
636 {
637 // No adjustment needed
638 quat_tf.setRPY(0, 0, remove_units(units::angle::radian_t{enu_yaw}));
639 }
640
641 detection.pose.pose.orientation = tf2::toMsg(quat_tf);
642
643 const auto speed{Speed::from_msg(common_data.speed)};
644 detection.twist.twist.linear.x =
645 remove_units(units::velocity::meters_per_second_t{speed.speed});
646
647 if (!common_data.speed_z.unavailable){
648 const auto speed_z{Speed::from_msg(common_data.speed_z)};
649 detection.twist.twist.linear.z =
650 remove_units(units::velocity::meters_per_second_t{speed_z.speed});
651 }
652 else{
653 detection.twist.twist.linear.z = remove_units(units::velocity::meters_per_second_t{0.0});
654 }
655
656 // NOTE: common_data.accel_4_way.longitudinal, lateral, vert not supported
657 // and not needed at the moment for multiple object tracking algorithm
658 // Having non-zero yaw_rate value means available
659 if(static_cast<bool>(common_data.accel_4_way.yaw_rate)){
660 const auto accel_set{AccelerationSet4Way::from_msg(common_data.accel_4_way)};
661 detection.twist.twist.angular.z =
662 remove_units(units::angular_velocity::degrees_per_second_t{accel_set.yaw_rate});
663 }
664 else{
665 detection.twist.twist.angular.z = 0.0;
666 }
667
668 convert_covariances(detection, common_data, conversion_adjustment);
669
670 convert_object_type(detection, common_data.obj_type);
671
672 detection_list.detections.push_back(std::move(detection));
673 }
674 }
675 catch (...) {
676 RCLCPP_ERROR_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"), "Error converting SDSM to object, ignoring sdsm message.");
677 }
678
679 return detection_list;
680}
681
682auto to_detection_msg(
683 const carma_perception_msgs::msg::ExternalObject & object,
684 const MotionModelMapping & motion_model_mapping)
685 -> carma_cooperative_perception_interfaces::msg::Detection
686{
687 carma_cooperative_perception_interfaces::msg::Detection detection;
688
689 detection.header = object.header;
690
691 if (object.presence_vector & object.BSM_ID_PRESENCE_VECTOR) {
692 detection.id = "";
693 std::transform(
694 std::cbegin(object.bsm_id), std::cend(object.bsm_id), std::back_inserter(detection.id),
695 [](const auto & i) { return i + '0'; });
696 }
697
698 if (object.presence_vector & object.ID_PRESENCE_VECTOR) {
699 detection.id += '-' + std::to_string(object.id);
700 }
701
702 if (object.presence_vector & object.POSE_PRESENCE_VECTOR) {
703 detection.pose = object.pose;
704 }
705
706 if (object.presence_vector & object.VELOCITY_PRESENCE_VECTOR) {
707 detection.twist = object.velocity;
708 }
709
710 if (object.presence_vector & object.OBJECT_TYPE_PRESENCE_VECTOR) {
711 switch (object.object_type) {
712 case object.SMALL_VEHICLE:
713 detection.motion_model = motion_model_mapping.small_vehicle_model;
714 detection.semantic_class = detection.SEMANTIC_CLASS_SMALL_VEHICLE;
715 break;
716 case object.LARGE_VEHICLE:
717 detection.motion_model = motion_model_mapping.large_vehicle_model;
718 detection.semantic_class = detection.SEMANTIC_CLASS_LARGE_VEHICLE;
719 break;
720 case object.MOTORCYCLE:
721 detection.motion_model = motion_model_mapping.motorcycle_model;
722 detection.semantic_class = detection.SEMANTIC_CLASS_MOTORCYCLE;
723 break;
724 case object.PEDESTRIAN:
725 detection.motion_model = motion_model_mapping.pedestrian_model;
726 detection.semantic_class = detection.SEMANTIC_CLASS_PEDESTRIAN;
727 break;
728 case object.UNKNOWN:
729 default:
730 detection.motion_model = motion_model_mapping.unknown_model;
731 detection.semantic_class = detection.SEMANTIC_CLASS_UNKNOWN;
732 }
733 }
734
735 return detection;
736}
737
738auto to_detection_list_msg(
739 const carma_perception_msgs::msg::ExternalObjectList & object_list,
740 const MotionModelMapping & motion_model_mapping)
741 -> carma_cooperative_perception_interfaces::msg::DetectionList
742{
743 carma_cooperative_perception_interfaces::msg::DetectionList detection_list;
744
745 std::transform(
746 std::cbegin(object_list.objects), std::cend(object_list.objects),
747 std::back_inserter(detection_list.detections),
748 [&motion_model_mapping = std::as_const(motion_model_mapping)](const auto & object) {
749 return to_detection_msg(object, motion_model_mapping);
750 });
751
752 return detection_list;
753}
754
755auto to_external_object_msg(const carma_cooperative_perception_interfaces::msg::Track & track)
756 -> carma_perception_msgs::msg::ExternalObject
757{
758 carma_perception_msgs::msg::ExternalObject external_object;
759 external_object.header = track.header;
760 external_object.presence_vector = 0;
761
762 const auto to_numeric_id = [](std::string string_id) -> std::optional<uint32_t> {
763 auto non_digit_start = std::remove_if(
764 std::begin(string_id), std::end(string_id),
765 [](const auto & ch) { return !std::isdigit(ch); });
766
767 std::uint32_t numeric_id;
768 const auto digit_substr_size{std::distance(std::begin(string_id), non_digit_start)};
769 if (
770 std::from_chars(string_id.c_str(), string_id.c_str() + digit_substr_size, numeric_id).ec ==
771 std::errc{}) {
772 return numeric_id;
773 }
774
775 return std::nullopt;
776 };
777
778 if (const auto numeric_id{to_numeric_id(track.id)}) {
779 external_object.presence_vector |= external_object.ID_PRESENCE_VECTOR;
780 external_object.id = numeric_id.value();
781 } else {
782 external_object.presence_vector &= ~external_object.ID_PRESENCE_VECTOR;
783 }
784
785 external_object.presence_vector |= external_object.POSE_PRESENCE_VECTOR;
786 external_object.pose = track.pose;
787
788 external_object.presence_vector |= external_object.VELOCITY_PRESENCE_VECTOR;
789
790 const auto track_longitudinal_velocity{track.twist.twist.linear.x};
791 const auto track_orientation = track.pose.pose.orientation;
792
793 tf2::Quaternion q(
794 track_orientation.x, track_orientation.y, track_orientation.z, track_orientation.w);
795 tf2::Matrix3x3 m(q);
796 double roll, pitch, yaw;
797 m.getRPY(roll, pitch, yaw);
798
799 external_object.velocity.twist.linear.x = track_longitudinal_velocity * std::cos(yaw);
800 external_object.velocity.twist.linear.y = track_longitudinal_velocity * std::sin(yaw);
801
802 external_object.object_type = track.semantic_class;
803
804 external_object.presence_vector |= external_object.OBJECT_TYPE_PRESENCE_VECTOR;
805 switch (track.semantic_class) {
806 case track.SEMANTIC_CLASS_SMALL_VEHICLE:
807 external_object.object_type = external_object.SMALL_VEHICLE;
808 break;
809 case track.SEMANTIC_CLASS_LARGE_VEHICLE:
810 external_object.object_type = external_object.LARGE_VEHICLE;
811 break;
812 case track.SEMANTIC_CLASS_MOTORCYCLE:
813 external_object.object_type = external_object.MOTORCYCLE;
814 break;
815 case track.SEMANTIC_CLASS_PEDESTRIAN:
816 external_object.object_type = external_object.PEDESTRIAN;
817 break;
818 case track.SEMANTIC_CLASS_UNKNOWN:
819 default:
820 external_object.object_type = external_object.UNKNOWN;
821 }
822
823 return external_object;
824}
825
826auto to_external_object_list_msg(
827 const carma_cooperative_perception_interfaces::msg::TrackList & track_list)
828 -> carma_perception_msgs::msg::ExternalObjectList
829{
830 carma_perception_msgs::msg::ExternalObjectList external_object_list;
831
832 for (const auto & track : track_list.tracks) {
833 external_object_list.objects.push_back(to_external_object_msg(track));
834 }
835
836 return external_object_list;
837}
838
839auto to_sdsm_msg(
840 const carma_perception_msgs::msg::ExternalObjectList & external_object_list,
841 const geometry_msgs::msg::PoseStamped & current_pose,
842 const std::shared_ptr<lanelet::projection::LocalFrameProjector> & map_projection)
843 -> carma_v2x_msgs::msg::SensorDataSharingMessage
844{
845 carma_v2x_msgs::msg::SensorDataSharingMessage sdsm;
846 carma_v2x_msgs::msg::DetectedObjectList detected_object_list;
847
848 sdsm.sdsm_time_stamp = to_ddate_time_msg(external_object_list.header.stamp);
849
850 sdsm.ref_pos = transform_pose_from_map_to_wgs84(current_pose, map_projection);
851
852 // Convert external objects within the list to detected_object_data
853 for (const auto & external_object : external_object_list.objects) {
854 auto sdsm_detected_object = to_detected_object_data_msg(external_object, map_projection);
855
856 // Calculate the time offset between individual objects and the respective SDSM container msg
857 sdsm_detected_object.detected_object_common_data.measurement_time =
858 calc_sdsm_time_offset(external_object_list.header.stamp, external_object.header.stamp);
859
860 // Calculate the position offset from the current reference pose (in m)
861 sdsm_detected_object.detected_object_common_data.pos =
862 calc_relative_position(current_pose, sdsm_detected_object.detected_object_common_data.pos);
863
864 detected_object_list.detected_object_data.push_back(sdsm_detected_object);
865 }
866
867 std::vector<uint8_t> id = {0x00, 0x00, 0x00, 0x01};
868 sdsm.source_id.id = id;
869 sdsm.equipment_type.equipment_type = j3224_v2x_msgs::msg::EquipmentType::OBU;
870 sdsm.ref_pos_xy_conf.semi_major = j2735_v2x_msgs::msg::PositionalAccuracy::ACCURACY_UNAVAILABLE;
871 sdsm.ref_pos_xy_conf.semi_minor = j2735_v2x_msgs::msg::PositionalAccuracy::ACCURACY_UNAVAILABLE;
872 sdsm.ref_pos_xy_conf.orientation =
873 j2735_v2x_msgs::msg::PositionalAccuracy::ACCURACY_ORIENTATION_UNAVAILABLE;
874 sdsm.objects = detected_object_list;
875
876 return sdsm;
877}
878
879auto to_detected_object_data_msg(
880 const carma_perception_msgs::msg::ExternalObject & external_object,
881 const std::shared_ptr<lanelet::projection::LocalFrameProjector> & map_projection)
882 -> carma_v2x_msgs::msg::DetectedObjectData
883{
884 carma_v2x_msgs::msg::DetectedObjectData detected_object_data;
885 detected_object_data.presence_vector = 0;
886
887 carma_v2x_msgs::msg::DetectedObjectCommonData detected_object_common_data;
888 detected_object_common_data.presence_vector = 0;
889
890 // common data //////////
891
892 // obj_type_cfd - convert the confidence from a fraction to a percentage and cast to the proper uint type
893 if (external_object.presence_vector & external_object.OBJECT_TYPE_PRESENCE_VECTOR) {
894 detected_object_common_data.obj_type_cfd.classification_confidence =
895 static_cast<std::uint8_t>(external_object.confidence * 100);
896 }
897
898 // detected_id - cast proper type
899 if (external_object.presence_vector & external_object.ID_PRESENCE_VECTOR) {
900 detected_object_common_data.detected_id.object_id =
901 static_cast<std::uint16_t>(external_object.id);
902 }
903
904 // pos - Add offset to ref_pos to get object position
905 // in map frame -> convert to WGS84 coordinates for sdsm
906
907 // To get offset: Subtract the external object pose from
908 // the current vehicle location given by the current_pose topic
909 if (external_object.presence_vector & external_object.POSE_PRESENCE_VECTOR) {
910 detected_object_common_data.pos.offset_x.object_distance =
911 static_cast<float>(external_object.pose.pose.position.x);
912 detected_object_common_data.pos.offset_y.object_distance =
913 static_cast<float>(external_object.pose.pose.position.y);
914 detected_object_common_data.pos.offset_z.object_distance =
915 static_cast<float>(external_object.pose.pose.position.z);
916 }
917
918 // speed/speed_z - convert the velocity vector to a scalar speed value from its x/y components
919 if (external_object.presence_vector & external_object.VELOCITY_PRESENCE_VECTOR) {
920 detected_object_common_data.speed.speed =
921 std::hypot(external_object.velocity.twist.linear.x, external_object.velocity.twist.linear.y);
922
923 detected_object_common_data.presence_vector |=
924 carma_v2x_msgs::msg::DetectedObjectCommonData::HAS_SPEED_Z;
925 detected_object_common_data.speed_z.speed = external_object.velocity.twist.linear.z;
926
927 // heading - convert the object's ENU yaw to a true (WGS-84) heading
928 lanelet::BasicPoint3d external_object_position{
929 external_object.pose.pose.position.x, external_object.pose.pose.position.y,
930 external_object.pose.pose.position.z};
931 // Get yaw from orientation
932 auto obj_orientation = external_object.pose.pose.orientation;
933 tf2::Quaternion q(obj_orientation.x, obj_orientation.y, obj_orientation.z, obj_orientation.w);
934 tf2::Matrix3x3 m(q);
935 double roll, pitch, yaw;
936 m.getRPY(roll, pitch, yaw);
937
938 detected_object_common_data.heading.heading =
939 remove_units(enu_orientation_to_true_heading(yaw, external_object_position, map_projection));
940 }
941
942 // optional data (determine based on object type)
943 // use object type struct for better control
944 carma_v2x_msgs::msg::DetectedObjectOptionalData detected_object_optional_data;
945
946 switch (external_object.object_type) {
947 case external_object.SMALL_VEHICLE:
948 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::VEHICLE;
949
950 if (external_object.presence_vector & external_object.SIZE_PRESENCE_VECTOR) {
951 detected_object_optional_data.det_veh.presence_vector =
952 carma_v2x_msgs::msg::DetectedVehicleData::HAS_SIZE;
953 detected_object_optional_data.det_veh.presence_vector |=
954 carma_v2x_msgs::msg::DetectedVehicleData::HAS_HEIGHT;
955
956 detected_object_optional_data.det_veh.size.vehicle_width = external_object.size.y;
957 detected_object_optional_data.det_veh.size.vehicle_length = external_object.size.x;
958 detected_object_optional_data.det_veh.height.vehicle_height = external_object.size.z;
959 }
960 break;
961 case external_object.LARGE_VEHICLE:
962 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::VEHICLE;
963
964 if (external_object.presence_vector & external_object.SIZE_PRESENCE_VECTOR) {
965 detected_object_optional_data.det_veh.presence_vector =
966 carma_v2x_msgs::msg::DetectedVehicleData::HAS_SIZE;
967 detected_object_optional_data.det_veh.presence_vector |=
968 carma_v2x_msgs::msg::DetectedVehicleData::HAS_HEIGHT;
969
970 detected_object_optional_data.det_veh.size.vehicle_width = external_object.size.y;
971 detected_object_optional_data.det_veh.size.vehicle_length = external_object.size.x;
972 detected_object_optional_data.det_veh.height.vehicle_height = external_object.size.z;
973 }
974 break;
975 case external_object.MOTORCYCLE:
976 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::VEHICLE;
977
978 if (external_object.presence_vector & external_object.SIZE_PRESENCE_VECTOR) {
979 detected_object_optional_data.det_veh.presence_vector =
980 carma_v2x_msgs::msg::DetectedVehicleData::HAS_SIZE;
981 detected_object_optional_data.det_veh.presence_vector |=
982 carma_v2x_msgs::msg::DetectedVehicleData::HAS_HEIGHT;
983
984 detected_object_optional_data.det_veh.size.vehicle_width = external_object.size.y;
985 detected_object_optional_data.det_veh.size.vehicle_length = external_object.size.x;
986 detected_object_optional_data.det_veh.height.vehicle_height = external_object.size.z;
987 }
988 break;
989 case external_object.PEDESTRIAN:
990 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::VRU;
991
992 detected_object_optional_data.det_vru.presence_vector =
993 carma_v2x_msgs::msg::DetectedVRUData::HAS_BASIC_TYPE;
994 detected_object_optional_data.det_vru.basic_type.type |=
995 j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PEDESTRIAN;
996
997 break;
998 case external_object.UNKNOWN:
999 default:
1000 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::UNKNOWN;
1001
1002 if (external_object.presence_vector & external_object.SIZE_PRESENCE_VECTOR) {
1003 detected_object_optional_data.det_obst.obst_size.width.size_value = external_object.size.y;
1004 detected_object_optional_data.det_obst.obst_size.length.size_value = external_object.size.x;
1005
1006 detected_object_optional_data.det_obst.obst_size.presence_vector =
1007 carma_v2x_msgs::msg::ObstacleSize::HAS_HEIGHT;
1008 detected_object_optional_data.det_obst.obst_size.height.size_value = external_object.size.z;
1009 }
1010 }
1011
1012 detected_object_data.detected_object_common_data = std::move(detected_object_common_data);
1013 detected_object_data.detected_object_optional_data = std::move(detected_object_optional_data);
1014
1015 return detected_object_data;
1016}
1017
1018} // namespace carma_cooperative_perception
auto enu_orientation_to_true_heading(double yaw, const lanelet::BasicPoint3d &obj_pose, const std::shared_ptr< lanelet::projection::LocalFrameProjector > &map_projection) -> units::angle::degree_t
void convert_covariances(carma_cooperative_perception_interfaces::msg::Detection &detection, const carma_v2x_msgs::msg::DetectedObjectCommonData &common_data, const std::optional< SdsmToDetectionListConfig > &conversion_adjustment)
auto to_external_object_list_msg(const carma_cooperative_perception_interfaces::msg::TrackList &track_list) -> carma_perception_msgs::msg::ExternalObjectList
auto calc_sdsm_time_offset(const builtin_interfaces::msg::Time &external_object_list_time, const builtin_interfaces::msg::Time &external_object_time) -> carma_v2x_msgs::msg::MeasurementTimeOffset
std::string to_string(const std::vector< std::uint8_t > &temporary_id)
auto calculate_grid_convergence(const Wgs84Coordinate &position, const UtmZone &zone) -> units::angle::degree_t
Calculate grid convergence at a given position.
Definition: geodetic.cpp:154
auto to_ddate_time_msg(const builtin_interfaces::msg::Time &builtin_time) -> j2735_v2x_msgs::msg::DDateTime
auto calc_detection_time_stamp(DDateTime d_date_time, const MeasurementTimeOffset &offset) -> DDateTime
std::variant< multiple_object_tracking::CtrvDetection, multiple_object_tracking::CtraDetection > Detection
auto project_to_carma_map(const Wgs84Coordinate &coordinate, std::string_view proj_string) -> MapCoordinate
Definition: geodetic.cpp:55
auto to_string(const UtmZone &zone) -> std::string
Definition: utm_zone.cpp:21
auto to_detection_msg(const carma_perception_msgs::msg::ExternalObject &object, const MotionModelMapping &motion_model_mapping) -> carma_cooperative_perception_interfaces::msg::Detection
auto to_time_msg(const DDateTime &d_date_time, bool is_simulation) -> builtin_interfaces::msg::Time
auto ned_to_enu(const PositionOffsetXYZ &offset_ned) noexcept
auto to_detected_object_data_msg(const carma_perception_msgs::msg::ExternalObject &external_object, const std::shared_ptr< lanelet::projection::LocalFrameProjector > &map_projection) -> carma_v2x_msgs::msg::DetectedObjectData
void convert_object_type(carma_cooperative_perception_interfaces::msg::Detection &detection, const j3224_v2x_msgs::msg::ObjectType &j3224_obj_type)
auto to_external_object_msg(const carma_cooperative_perception_interfaces::msg::Track &track) -> carma_perception_msgs::msg::ExternalObject
auto heading_to_enu_yaw(const units::angle::degree_t &heading) -> units::angle::degree_t
auto calc_relative_position(const geometry_msgs::msg::PoseStamped &current_pose, const carma_v2x_msgs::msg::PositionOffsetXYZ &detected_object_data) -> carma_v2x_msgs::msg::PositionOffsetXYZ
auto to_sdsm_msg(const carma_perception_msgs::msg::ExternalObjectList &external_object_list, const geometry_msgs::msg::PoseStamped &current_pose, const std::shared_ptr< lanelet::projection::LocalFrameProjector > &map_projection) -> carma_v2x_msgs::msg::SensorDataSharingMessage
constexpr auto remove_units(const T &value)
auto to_position_msg(const UtmCoordinate &position_utm) -> geometry_msgs::msg::Point
std::variant< multiple_object_tracking::CtrvTrack, multiple_object_tracking::CtraTrack > Track
auto transform_pose_from_map_to_wgs84(const geometry_msgs::msg::PoseStamped &source_pose, const std::shared_ptr< lanelet::projection::LocalFrameProjector > &map_projection) -> carma_v2x_msgs::msg::Position3D
auto to_detection_list_msg(const carma_v2x_msgs::msg::SensorDataSharingMessage &sdsm, std::string_view georeference, bool is_simulation, const std::optional< SdsmToDetectionListConfig > &conversion_adjustment) -> carma_cooperative_perception_interfaces::msg::DetectionList
Converts a carma_v2x_msgs::msg::SensorDataSharingMessage (SDSM) to carma_cooperative_perception_inter...
static auto from_msg(const j2735_v2x_msgs::msg::AccelerationSet4Way &msg) -> AccelerationSet4Way
Definition: j2735_types.cpp:55
std::optional< units::time::millisecond_t > second
Definition: j2735_types.hpp:43
static auto from_msg(const j2735_v2x_msgs::msg::DDateTime &msg) -> DDateTime
Definition: j2735_types.cpp:19
static auto from_msg(const j2735_v2x_msgs::msg::Heading &heading) -> Heading
static auto from_msg(const j3224_v2x_msgs::msg::MeasurementTimeOffset &msg) -> MeasurementTimeOffset
Definition: j3224_types.cpp:47
static auto from_msg(const j2735_v2x_msgs::msg::Position3D &msg) -> Position3D
Definition: j2735_types.cpp:75
static auto from_msg(const j3224_v2x_msgs::msg::PositionOffsetXYZ &msg) -> PositionOffsetXYZ
Definition: j3224_types.cpp:19
static auto from_msg(const j2735_v2x_msgs::msg::Speed &speed) -> Speed
Represents a position using UTM coordinates.
Definition: geodetic.hpp:50
Represents a position using WGS-84 coordinates.
Definition: geodetic.hpp:33
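For orientation, a minimal sketch of how a node might drive the SDSM-to-DetectionList conversion defined in this file. The header paths, callback shape, and use of std::nullopt (no per-deployment covariance or pose adjustment) are assumptions for illustration, not code from the CARMA sources:

#include <optional>
#include <string>

#include <rclcpp/rclcpp.hpp>
#include <carma_cooperative_perception_interfaces/msg/detection_list.hpp>
#include <carma_v2x_msgs/msg/sensor_data_sharing_message.hpp>
// Assumed location of the declarations documented above.
#include <carma_cooperative_perception/msg_conversion.hpp>

// Convert one incoming SDSM and republish it as a DetectionList.
void on_sdsm(
  const carma_v2x_msgs::msg::SensorDataSharingMessage & sdsm,
  const std::string & georeference,  // PROJ string describing the current map frame
  bool is_simulation,
  rclcpp::Publisher<carma_cooperative_perception_interfaces::msg::DetectionList> & publisher)
{
  const auto detection_list = carma_cooperative_perception::to_detection_list_msg(
    sdsm, georeference, is_simulation, std::nullopt);
  publisher.publish(detection_list);
}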