Carma-platform v4.2.0
CARMA Platform is built on the Robot Operating System (ROS) and utilizes open source software (OSS) to enable Cooperative Driving Automation (CDA) features that allow Automated Driving Systems to interact and cooperate with infrastructure and other vehicles through communication.
msg_conversion.cpp
1// Copyright 2023 Leidos
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
16
17#include <carma_cooperative_perception_interfaces/msg/track.hpp>
18#include <carma_cooperative_perception_interfaces/msg/track_list.hpp>
19#include <carma_perception_msgs/msg/external_object.hpp>
20#include <carma_perception_msgs/msg/external_object_list.hpp>
21#include <j2735_v2x_msgs/msg/d_date_time.hpp>
22#include <j2735_v2x_msgs/msg/personal_device_user_type.hpp>
23#include <j2735_v2x_msgs/msg/positional_accuracy.hpp>
24#include <j2735_v2x_msgs/to_floating_point.hpp>
25#include <j3224_v2x_msgs/msg/detected_object_data.hpp>
26#include <j3224_v2x_msgs/msg/equipment_type.hpp>
27#include <j3224_v2x_msgs/msg/measurement_time_offset.hpp>
28#include <j3224_v2x_msgs/msg/object_type.hpp>
29#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
30
31#include <algorithm>
32#include <cctype>
33#include <charconv>
34#include <chrono>
35#include <cmath>
36#include <limits>
37#include <numeric>
38#include <string>
39#include <utility>
40#include <cstdlib> // for setenv, unsetenv
41#include <ctime> // for tzset
42#include <fmt/format.h>
43
44#include <proj.h>
45#include <gsl/pointers>
46#include <memory>
47
52
53#include <lanelet2_core/geometry/Lanelet.h>
54#include <lanelet2_extension/projection/local_frame_projector.h>
55#include <boost/date_time/posix_time/conversion.hpp>
56#include <boost/date_time/posix_time/posix_time_io.hpp>
57#include "boost/date_time/posix_time/posix_time.hpp"
58
59namespace carma_cooperative_perception
60{
61auto to_time_msg(const DDateTime & d_date_time, bool is_simulation) -> builtin_interfaces::msg::Time
62{
63 // Convert DDateTime to builtin_interfaces::msg::Time
64 builtin_interfaces::msg::Time msg;
65 if (!is_simulation) {
66 // Create a tm structure to hold the date and time components
67 std::tm timeinfo = {};
68
69 // Year
70 if (d_date_time.year){
71 if(remove_units(d_date_time.year.value()) >= 1970){
72        // std::tm counts years since 1900
73 timeinfo.tm_year = static_cast<int>(remove_units(d_date_time.year.value())) - 1900;
74 }
75 else{
76 throw std::invalid_argument(
77 "Year must be greater than 1970 for live date/time conversion");
78 }
79 }
80
81 // Month
82 if (d_date_time.month && static_cast<int>(d_date_time.month.value().get_value()) != 0)
83 {
84 // std::tm is counted from 0 to 11, J2735 is counted from 1 to 12
85 timeinfo.tm_mon = static_cast<int>(d_date_time.month.value().get_value()) - 1;
86 }
87
88 // Day
89 if (d_date_time.day && static_cast<int>(d_date_time.day.value()) != 0)
90 {
91 // Day is counted from 1 to 31 in both std::tm and J2735
92 timeinfo.tm_mday = static_cast<int>(d_date_time.day.value());
93 }
94 else{
95      timeinfo.tm_mday = 1; // Default to 1 if day is not provided, since the zero-initialized value would be invalid
96 }
97
98 // Hour
99 if (d_date_time.hour && static_cast<int>(d_date_time.hour.value()) != 31)
100 {
101 // Hour is counted from 0 to 23 in both std::tm and J2735
102 timeinfo.tm_hour = static_cast<int>(d_date_time.hour.value());
103 }
104
105 // Minute
106 if (d_date_time.minute && static_cast<int>(d_date_time.minute.value()) != 60)
107 {
108 // Minute is counted from 0 to 59 in both std::tm and J2735
109 timeinfo.tm_min = static_cast<int>(d_date_time.minute.value());
110 }
111    // Set the seconds field (which actually holds milliseconds in J2735) to 0 for now;
112    // milliseconds are added later
113 timeinfo.tm_sec = 0;
114
115 std::time_t timeT;
116
117 if (d_date_time.time_zone_offset)
118 {
119 timeinfo.tm_gmtoff = static_cast<int>(d_date_time.time_zone_offset.value());
120 timeT = std::mktime(&timeinfo);
121 }
122 else
123 {
124 // Get the current timezone from the system
125 // Use tzset() to initialize timezone data from system
126 tzset();
127
128 // Get current timestamp to determine DST status
129      // NOTE: If the system is running in a Docker container (which it typically is),
130      // the timezone defaults to GMT unless otherwise configured.
131 std::time_t currentTime = std::time(nullptr);
132 std::tm* localTimeInfo = std::localtime(&currentTime);
133
134 long timezone_offset = localTimeInfo->tm_gmtoff;
135
136 timeinfo.tm_gmtoff = timezone_offset;
137 timeinfo.tm_isdst = localTimeInfo->tm_isdst;
138
139 // Convert to time_t
140 timeT = std::mktime(&timeinfo);
141 }
142
143 // Convert time_t to system_clock::time_point
144 auto timePoint = std::chrono::system_clock::from_time_t(timeT);
145
146 // Add milliseconds
147 int milliseconds = 0;
148 if (d_date_time.second)
149 {
150 milliseconds = static_cast<int>(d_date_time.second.value());
151 }
152 timePoint += std::chrono::milliseconds(milliseconds);
153
154 // Extract seconds and nanoseconds since epoch
155 auto duration = timePoint.time_since_epoch();
156 auto seconds = std::chrono::duration_cast<std::chrono::seconds>(duration);
157 auto nanoseconds = std::chrono::duration_cast<std::chrono::nanoseconds>(duration - seconds);
158
159 msg.sec = static_cast<int32_t>(seconds.count());
160 msg.nanosec = static_cast<uint32_t>(nanoseconds.count());
161
162 return msg;
163 }
164
165  // In simulation, the year, month, and day are ignored because a simulation run will not span that long
166 double seconds;
167 const auto fractional_secs{std::modf(
168 remove_units(units::time::second_t{d_date_time.hour.value_or(units::time::second_t{0.0})}) +
169 remove_units(units::time::second_t{d_date_time.minute.value_or(units::time::second_t{0.0})}) +
170 remove_units(units::time::second_t{d_date_time.second.value_or(units::time::second_t{0.0})}),
171 &seconds)};
172
173 msg.sec = static_cast<std::int32_t>(seconds);
174 msg.nanosec = static_cast<std::int32_t>(fractional_secs * 1e9);
175
176 return msg;
177}
178
179auto calc_detection_time_stamp(DDateTime sdsm_time, const MeasurementTimeOffset & offset)
180  -> DDateTime
181{
182 sdsm_time.second.value() += offset.measurement_time_offset;
183
184 return sdsm_time;
185}
186
187auto ned_to_enu(const PositionOffsetXYZ & offset_ned) noexcept
188{
189 auto offset_enu{offset_ned};
190
191 // NED to ENU: swap x and y axis and negate z axis
192 offset_enu.offset_x = offset_ned.offset_y;
193 offset_enu.offset_y = offset_ned.offset_x;
194
195 if (offset_enu.offset_z) {
196 offset_enu.offset_z.value() *= -1;
197 }
198
199 return offset_enu;
200}
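// Example: an offset of (x = +1 m north, y = +2 m east, z = +3 m down) in NED
// becomes (x = +2 m east, y = +1 m north, z = -3 m up) in ENU.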
201
202auto to_ddate_time_msg(const builtin_interfaces::msg::Time & builtin_time)
203 -> j2735_v2x_msgs::msg::DDateTime
204{
205 j2735_v2x_msgs::msg::DDateTime ddate_time_output;
206
207 // Add the time components from epoch seconds
208 boost::posix_time::ptime posix_time = boost::posix_time::from_time_t(builtin_time.sec) +
209 boost::posix_time::nanosec(builtin_time.nanosec);
210
211 const auto time_stamp_year = posix_time.date().year();
212 const auto time_stamp_month = posix_time.date().month();
213 const auto time_stamp_day = posix_time.date().day();
214
215 const auto hours_of_day = posix_time.time_of_day().hours();
216 const auto minutes_of_hour = posix_time.time_of_day().minutes();
217 const auto seconds_of_minute = posix_time.time_of_day().seconds();
218
219 ddate_time_output.presence_vector = 0;
220
221 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::YEAR;
222 ddate_time_output.year.year = time_stamp_year;
223 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::MONTH;
224 ddate_time_output.month.month = time_stamp_month;
225 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::DAY;
226 ddate_time_output.day.day = time_stamp_day;
227 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::HOUR;
228 ddate_time_output.hour.hour = hours_of_day;
229 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::MINUTE;
230 ddate_time_output.minute.minute = minutes_of_hour;
231 ddate_time_output.presence_vector |= j2735_v2x_msgs::msg::DDateTime::SECOND;
232 ddate_time_output.second.millisecond = seconds_of_minute;
233
234 return ddate_time_output;
235}
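// Example: a builtin_interfaces::msg::Time of {sec: 3661, nanosec: 0} (epoch + 1 h 1 min 1 s)
// yields year 1970, month 1, day 1, hour 1, minute 1, and writes 1 into the second field's
// millisecond member.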
236
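// Computes the measurement time offset of an individual detected object relative to the SDSM
// container message: (external object list stamp - external object stamp), truncated to whole
// seconds by total_seconds().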
237auto calc_sdsm_time_offset(
238  const builtin_interfaces::msg::Time & external_object_list_stamp,
239 const builtin_interfaces::msg::Time & external_object_stamp)
240 -> carma_v2x_msgs::msg::MeasurementTimeOffset
241{
242 carma_v2x_msgs::msg::MeasurementTimeOffset time_offset;
243
244 boost::posix_time::ptime external_object_list_time =
245 boost::posix_time::from_time_t(external_object_list_stamp.sec) +
246 boost::posix_time::nanosec(external_object_list_stamp.nanosec);
247
248 boost::posix_time::ptime external_object_time =
249 boost::posix_time::from_time_t(external_object_stamp.sec) +
250 boost::posix_time::nanosec(external_object_stamp.nanosec);
251
252 boost::posix_time::time_duration offset_duration =
253 (external_object_list_time - external_object_time);
254
255 time_offset.measurement_time_offset = offset_duration.total_seconds();
256
257 return time_offset;
258}
259
260auto to_position_msg(const UtmCoordinate & position_utm) -> geometry_msgs::msg::Point
261{
262 geometry_msgs::msg::Point msg;
263
264 msg.x = remove_units(position_utm.easting);
265 msg.y = remove_units(position_utm.northing);
266 msg.z = remove_units(position_utm.elevation);
267
268 return msg;
269}
270
271auto to_position_msg(const MapCoordinate & position_map) -> geometry_msgs::msg::Point
272{
273 geometry_msgs::msg::Point msg;
274
275 msg.x = remove_units(position_map.easting);
276 msg.y = remove_units(position_map.northing);
277 msg.z = remove_units(position_map.elevation);
278
279 return msg;
280}
281
282auto heading_to_enu_yaw(const units::angle::degree_t & heading) -> units::angle::degree_t
283{
284 return units::angle::degree_t{std::fmod(-(remove_units(heading) - 90.0) + 360.0, 360.0)};
285}
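// Example: a heading of 0 deg (north) maps to an ENU yaw of 90 deg, 90 deg (east) maps to 0 deg,
// and 180 deg (south) maps to 270 deg.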
286
287auto enu_orientation_to_true_heading(
288  double yaw, const lanelet::BasicPoint3d & obj_pose,
289 const std::shared_ptr<lanelet::projection::LocalFrameProjector> & map_projection)
290 -> units::angle::degree_t
291{
292 // Get object geodetic position
293 lanelet::GPSPoint wgs_obj_pose = map_projection->reverse(obj_pose);
294
295 // Get WGS84 Heading
296 gsl::owner<PJ_CONTEXT *> context = proj_context_create();
297 gsl::owner<PJ *> transform = proj_create(context, map_projection->ECEF_PROJ_STR);
298 units::angle::degree_t grid_heading{std::fmod(90 - yaw + 360, 360)};
299
300 const auto factors = proj_factors(
301 transform, proj_coord(proj_torad(wgs_obj_pose.lon), proj_torad(wgs_obj_pose.lat), 0, 0));
302 units::angle::degree_t grid_convergence{proj_todeg(factors.meridian_convergence)};
303
304 auto wgs_heading = grid_convergence + grid_heading;
305
306 proj_destroy(transform);
307 proj_context_destroy(context);
308
309 return wgs_heading;
310}
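// Note: the grid heading is derived from ENU yaw as (90 - yaw) deg, so a yaw of 0 (facing +x, east)
// corresponds to a grid heading of 90 deg; adding the meridian convergence at the object's position
// then yields the true (WGS-84) heading.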
311
312// Determine the object position offset (in meters) between the current reference pose
313// (in the map frame) and the external object pose
314auto calc_relative_position(
315  const geometry_msgs::msg::PoseStamped & source_pose,
316 const carma_v2x_msgs::msg::PositionOffsetXYZ & position_offset)
317 -> carma_v2x_msgs::msg::PositionOffsetXYZ
318{
319 carma_v2x_msgs::msg::PositionOffsetXYZ adjusted_offset;
320
321 adjusted_offset.offset_x.object_distance =
322 position_offset.offset_x.object_distance - source_pose.pose.position.x;
323 adjusted_offset.offset_y.object_distance =
324 position_offset.offset_y.object_distance - source_pose.pose.position.y;
325 adjusted_offset.offset_z.object_distance =
326 position_offset.offset_z.object_distance - source_pose.pose.position.z;
327 adjusted_offset.presence_vector = carma_v2x_msgs::msg::PositionOffsetXYZ::HAS_OFFSET_Z;
328
329 return adjusted_offset;
330}
331
332auto transform_pose_from_map_to_wgs84(
333  const geometry_msgs::msg::PoseStamped & source_pose,
334 const std::shared_ptr<lanelet::projection::LocalFrameProjector> & map_projection)
335 -> carma_v2x_msgs::msg::Position3D
336{
337 carma_v2x_msgs::msg::Position3D ref_pos;
338 lanelet::BasicPoint3d source_pose_basicpoint{
339 source_pose.pose.position.x, source_pose.pose.position.y, 0.0};
340
341 lanelet::GPSPoint wgs84_ref_pose = map_projection->reverse(source_pose_basicpoint);
342
343 ref_pos.longitude = wgs84_ref_pose.lon;
344 ref_pos.latitude = wgs84_ref_pose.lat;
345 ref_pos.elevation = wgs84_ref_pose.ele;
346 ref_pos.elevation_exists = true;
347
348 return ref_pos;
349}
350
351// Helper function to convert a vector of uint8_t to a hex string
352// TemporaryID and octet string terms come from the SAE J2735 message definitions
353std::string to_string(const std::vector<std::uint8_t> & temporary_id) {
354 std::string str;
355 str.reserve(2 * std::size(temporary_id)); // Two hex characters per octet string
356
357 std::array<char, 2> buffer;
358 for (const auto & octet_string : temporary_id) {
359 std::to_chars(std::begin(buffer), std::end(buffer), octet_string, 16);
360 str.push_back(std::toupper(std::get<0>(buffer)));
361 str.push_back(std::toupper(std::get<1>(buffer)));
362 }
363
364 return str;
365};
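// Example: the TemporaryID octets {0xAB, 0xCD, 0x12, 0x34} are rendered as the hex string "ABCD1234".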
366
367// Helper function to map a J3224 ObjectType onto the Detection's motion model and semantic class
368void convert_object_type(carma_cooperative_perception_interfaces::msg::Detection& detection,
369  const j3224_v2x_msgs::msg::ObjectType& j3224_obj_type)
370{
371 switch (j3224_obj_type.object_type) {
372 case j3224_obj_type.ANIMAL:
373 detection.motion_model = detection.MOTION_MODEL_CTRV;
374 // We don't have a good semantic class mapping for animals
375 detection.semantic_class = detection.SEMANTIC_CLASS_UNKNOWN;
376 break;
377 case j3224_obj_type.VRU:
378 detection.motion_model = detection.MOTION_MODEL_CTRV;
379 detection.semantic_class = detection.SEMANTIC_CLASS_PEDESTRIAN;
380 break;
381 case j3224_obj_type.VEHICLE:
382 detection.motion_model = detection.MOTION_MODEL_CTRV;
383 detection.semantic_class = detection.SEMANTIC_CLASS_SMALL_VEHICLE;
384 break;
385 default:
386 detection.motion_model = detection.MOTION_MODEL_CTRV;
387 detection.semantic_class = detection.SEMANTIC_CLASS_UNKNOWN;
388 }
389}
390// Helper function to fill all covariance-related values from J3224 confidence values
391void convert_covariances(carma_cooperative_perception_interfaces::msg::Detection& detection,
392  const carma_v2x_msgs::msg::DetectedObjectCommonData& common_data,
393 const std::optional<SdsmToDetectionListConfig>& conversion_adjustment)
394{
395 // Variables to store original covariance values for debugging
396 double original_pose_covariance_x = 0.0;
397 double original_pose_covariance_y = 0.0;
398 double original_pose_covariance_z = 0.0;
399 double original_pose_covariance_yaw = 0.0;
400 double original_twist_covariance_x = 0.0;
401 double original_twist_covariance_z = 0.0;
402 double original_twist_covariance_yaw = 0.0;
403
404 // Calculate and log original pose covariance values
405 try {
406 original_pose_covariance_x =
407 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.pos_confidence.pos).value(), 2);
408 original_pose_covariance_y = original_pose_covariance_x;
409
410 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
411 "Original pose covariance X/Y: " << original_pose_covariance_x);
412 } catch (const std::bad_optional_access &) {
413 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
414 "Missing position confidence");
415 }
416
417 try {
418 original_pose_covariance_z =
419 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.pos_confidence.elevation).value(), 2);
420
421 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
422 "Original pose covariance Z: " << original_pose_covariance_z);
423 } catch (const std::bad_optional_access &) {
424 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
425 "Missing elevation confidence");
426 }
427
428 try {
429 // Get original heading/yaw covariance
430 original_pose_covariance_yaw =
431 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.heading_conf).value(), 2);
432
433 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
434 "Original pose covariance yaw: " << original_pose_covariance_yaw);
435 } catch (const std::bad_optional_access &) {
436 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
437 "Missing heading confidence");
438 }
439
440 try {
441 // Get original linear x velocity covariance
442 original_twist_covariance_x =
443 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.speed_confidence).value(), 2);
444
445 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
446 "Original twist covariance X: " << original_twist_covariance_x);
447 } catch (const std::bad_optional_access &) {
448 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
449 "Missing speed confidence");
450 }
451
452 if (!common_data.speed_z.unavailable){
453 try {
454 // Get original linear z velocity covariance
455 original_twist_covariance_z =
456 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.speed_confidence_z).value(), 2);
457
458 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
459 "Original twist covariance Z: " << original_twist_covariance_z);
460 } catch (const std::bad_optional_access &) {
461 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
462 "Missing z-speed confidence");
463 }
464 }
465 else{
466 original_twist_covariance_z = 0.0;
467 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
468 "Original twist covariance Z: 0.0 (speed_z not provided)");
469 }
470
471  // A non-zero yaw rate value means the field is available
472 if(static_cast<bool>(common_data.accel_4_way.yaw_rate)){
473 try {
474 // Get original angular z velocity (yaw rate) covariance
475 original_twist_covariance_yaw =
476 0.5 * std::pow(j2735_v2x_msgs::to_double(common_data.acc_cfd_yaw).value(), 2);
477
478 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
479 "Original twist covariance yaw: " << original_twist_covariance_yaw);
480 } catch (const std::bad_optional_access &) {
481 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
482 "Missing yaw-rate confidence");
483 }
484 }
485 else{
486 original_twist_covariance_yaw = 0.0;
487 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
488 "Original twist covariance yaw: 0.0 (yaw_rate not provided)");
489 }
490
491 if (conversion_adjustment && conversion_adjustment.value().overwrite_covariance)
492 {
493 // Hardcoded pose covariance
494 detection.pose.covariance[0] = conversion_adjustment.value().pose_covariance_x;
495 detection.pose.covariance[7] = conversion_adjustment.value().pose_covariance_y;
496 detection.pose.covariance[14] = conversion_adjustment.value().pose_covariance_z;
497 detection.pose.covariance[35] = conversion_adjustment.value().pose_covariance_yaw;
498
499 // Hardcoded twist covariance
500 detection.twist.covariance[0] = conversion_adjustment.value().twist_covariance_x;
501 detection.twist.covariance[14] = conversion_adjustment.value().twist_covariance_z;
502 detection.twist.covariance[35] = conversion_adjustment.value().twist_covariance_yaw;
503
504 // Print comparison between original and hardcoded values
505 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
506 "POSE COVARIANCE COMPARISON - Original vs Hardcoded: " <<
507 "X: " << original_pose_covariance_x << " -> " << detection.pose.covariance[0] << ", " <<
508 "Y: " << original_pose_covariance_y << " -> " << detection.pose.covariance[7] << ", " <<
509 "Z: " << original_pose_covariance_z << " -> " << detection.pose.covariance[14] << ", " <<
510 "Yaw: " << original_pose_covariance_yaw << " -> " << detection.pose.covariance[35]);
511
512 RCLCPP_DEBUG_STREAM(rclcpp::get_logger("sdsm_to_detection_list_node"),
513 "TWIST COVARIANCE COMPARISON - Original vs Hardcoded: " <<
514 "X: " << original_twist_covariance_x << " -> , " << detection.twist.covariance[0] << ", " <<
515 "Z: " << original_twist_covariance_z << " -> , " << detection.twist.covariance[14] << ", " <<
516 "Yaw: " << original_twist_covariance_yaw << " -> " << detection.twist.covariance[35]);
517 }
518 else
519 {
520 // Original pose covariance
521 detection.pose.covariance[0] = original_pose_covariance_x;
522 detection.pose.covariance[7] = original_pose_covariance_y;
523 detection.pose.covariance[14] = original_pose_covariance_z;
524 detection.pose.covariance[35] = original_pose_covariance_yaw;
525
526 // Original twist covariance
527 detection.twist.covariance[0] = original_twist_covariance_x;
528 detection.twist.covariance[14] = original_twist_covariance_z;
529 detection.twist.covariance[35] = original_twist_covariance_yaw;
530 }
531
532 // Fill zeros for all other twist covariance values
533 for (size_t i = 0; i < 36; ++i) {
534 if (i != 0 && i != 14 && i != 35) {
535 detection.twist.covariance[i] = 0.0;
536 }
537 }
538
539 // Fill zeros for all other pose covariance values
540 for (size_t i = 0; i < 36; ++i) {
541 if (i != 0 && i != 7 && i != 14 && i != 35) {
542 detection.pose.covariance[i] = 0.0;
543 }
544 }
545}
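// Converts an incoming SDSM into a DetectionList in the CARMA map frame: the SDSM reference
// position is projected into the map, each detected object's NED offset is converted to ENU and
// added to that reference, the reported heading is converted to an ENU yaw quaternion, and speed,
// yaw rate, covariances, and object type are mapped onto the Detection message.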
575auto to_detection_list_msg(
576  const carma_v2x_msgs::msg::SensorDataSharingMessage & sdsm, std::string_view georeference,
577 bool is_simulation, const std::optional<SdsmToDetectionListConfig>& conversion_adjustment)
578 -> carma_cooperative_perception_interfaces::msg::DetectionList
579{
580 carma_cooperative_perception_interfaces::msg::DetectionList detection_list;
581 const auto ref_pos_3d{Position3D::from_msg(sdsm.ref_pos)};
582
583 units::length::meter_t elevation(0.0);
584 if(ref_pos_3d.elevation){
585 elevation = ref_pos_3d.elevation.value();
586 }
587 const Wgs84Coordinate ref_pos_wgs84{
588 ref_pos_3d.latitude, ref_pos_3d.longitude, elevation};
589
590 const auto ref_pos_map{project_to_carma_map(ref_pos_wgs84, georeference)};
591
592 for (const auto & object_data : sdsm.objects.detected_object_data) {
593 const auto common_data{object_data.detected_object_common_data};
594
595    carma_cooperative_perception_interfaces::msg::Detection detection;
596    detection.header.frame_id = "map";
597
598 const auto detection_time{calc_detection_time_stamp(
599 DDateTime::from_msg(sdsm.sdsm_time_stamp),
600 MeasurementTimeOffset::from_msg(common_data.measurement_time))};
601
602 detection.header.stamp = to_time_msg(detection_time, is_simulation);
603
604 detection.id = fmt::format("{}-{}",
605      to_string(sdsm.source_id.id),
606      common_data.detected_id.object_id);
607
608 const auto pos_offset_enu{ned_to_enu(PositionOffsetXYZ::from_msg(common_data.pos))};
609 detection.pose.pose.position = to_position_msg(MapCoordinate{
610 ref_pos_map.easting + pos_offset_enu.offset_x, ref_pos_map.northing + pos_offset_enu.offset_y,
611 ref_pos_map.elevation + pos_offset_enu.offset_z.value_or(units::length::meter_t{0.0})});
612
613 // Adjust object's position to match vector map coordinates as sensor calibrations are not
614 // always reliable
615 if (conversion_adjustment && conversion_adjustment.value().adjust_pose)
616 {
617 detection.pose.pose.position.x += conversion_adjustment.value().x_offset;
618 detection.pose.pose.position.y += conversion_adjustment.value().y_offset;
619 }
620
621 const auto true_heading{units::angle::degree_t{Heading::from_msg(common_data.heading).heading}};
622
623 // Note: This should really use the detection's WGS-84 position, so the
624 // convergence will be off slightly. TODO
625 const units::angle::degree_t grid_convergence{
626 calculate_grid_convergence(ref_pos_wgs84, georeference)};
627
628 const auto grid_heading{true_heading - grid_convergence};
629 const auto enu_yaw{heading_to_enu_yaw(grid_heading)};
630
631 tf2::Quaternion quat_tf;
632
633 if (conversion_adjustment && conversion_adjustment.value().adjust_pose)
634 {
635 // Adjust object's heading to match vector map coordinates as sensor calibrations are not
636 // always reliable
637 auto yaw_with_offset = units::angle::radian_t{enu_yaw} +
638 units::angle::radian_t{units::angle::degree_t{conversion_adjustment.value().yaw_offset}};
639 auto new_yaw = std::fmod(remove_units(yaw_with_offset) + 2 * M_PI, 2 * M_PI);
640 quat_tf.setRPY(0, 0, new_yaw);
641 }
642 else
643 {
644 // No adjustment needed
645 quat_tf.setRPY(0, 0, remove_units(units::angle::radian_t{enu_yaw}));
646 }
647
648 detection.pose.pose.orientation = tf2::toMsg(quat_tf);
649
650 const auto speed{Speed::from_msg(common_data.speed)};
651 detection.twist.twist.linear.x =
652 remove_units(units::velocity::meters_per_second_t{speed.speed});
653
654 if (!common_data.speed_z.unavailable){
655 const auto speed_z{Speed::from_msg(common_data.speed_z)};
656 detection.twist.twist.linear.z =
657 remove_units(units::velocity::meters_per_second_t{speed_z.speed});
658 }
659 else{
660 detection.twist.twist.linear.z = remove_units(units::velocity::meters_per_second_t{0.0});
661 }
662
663 // NOTE: common_data.accel_4_way.longitudinal, lateral, vert not supported
664 // and not needed at the moment for multiple object tracking algorithm
665 // Having non-zero yaw_rate value means available
666 if(static_cast<bool>(common_data.accel_4_way.yaw_rate)){
667 const auto accel_set{AccelerationSet4Way::from_msg(common_data.accel_4_way)};
668 detection.twist.twist.angular.z =
669 remove_units(units::angular_velocity::degrees_per_second_t{accel_set.yaw_rate});
670 }
671 else{
672 detection.twist.twist.angular.z = 0.0;
673 }
674
675 convert_covariances(detection, common_data, conversion_adjustment);
676
677 convert_object_type(detection, common_data.obj_type);
678
679 detection_list.detections.push_back(std::move(detection));
680 }
681
682 return detection_list;
683}
684
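// Converts a CARMA ExternalObject into a cooperative-perception Detection, copying the header
// and, when the corresponding presence-vector bits are set, the BSM/numeric id, pose, velocity,
// and object type (mapped to a motion model and semantic class).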
685auto to_detection_msg(
686  const carma_perception_msgs::msg::ExternalObject & object,
687  const MotionModelMapping & motion_model_mapping)
688  -> carma_cooperative_perception_interfaces::msg::Detection
689{
690  carma_cooperative_perception_interfaces::msg::Detection detection;
691
692 detection.header = object.header;
693
694 if (object.presence_vector & object.BSM_ID_PRESENCE_VECTOR) {
695 detection.id = "";
696 std::transform(
697 std::cbegin(object.bsm_id), std::cend(object.bsm_id), std::back_inserter(detection.id),
698 [](const auto & i) { return i + '0'; });
699 }
700
701 if (object.presence_vector & object.ID_PRESENCE_VECTOR) {
702 detection.id += '-' + std::to_string(object.id);
703 }
704
705 if (object.presence_vector & object.POSE_PRESENCE_VECTOR) {
706 detection.pose = object.pose;
707 }
708
709 if (object.presence_vector & object.VELOCITY_PRESENCE_VECTOR) {
710 detection.twist = object.velocity;
711 }
712
713 if (object.presence_vector & object.OBJECT_TYPE_PRESENCE_VECTOR) {
714 switch (object.object_type) {
715 case object.SMALL_VEHICLE:
716 detection.motion_model = motion_model_mapping.small_vehicle_model;
717 detection.semantic_class = detection.SEMANTIC_CLASS_SMALL_VEHICLE;
718 break;
719 case object.LARGE_VEHICLE:
720 detection.motion_model = motion_model_mapping.large_vehicle_model;
721 detection.semantic_class = detection.SEMANTIC_CLASS_LARGE_VEHICLE;
722 break;
723 case object.MOTORCYCLE:
724 detection.motion_model = motion_model_mapping.motorcycle_model;
725 detection.semantic_class = detection.SEMANTIC_CLASS_MOTORCYCLE;
726 break;
727 case object.PEDESTRIAN:
728 detection.motion_model = motion_model_mapping.pedestrian_model;
729 detection.semantic_class = detection.SEMANTIC_CLASS_PEDESTRIAN;
730 break;
731 case object.UNKNOWN:
732 default:
733 detection.motion_model = motion_model_mapping.unknown_model;
734 detection.semantic_class = detection.SEMANTIC_CLASS_UNKNOWN;
735 }
736 }
737
738 return detection;
739}
740
741auto to_detection_list_msg(
742  const carma_perception_msgs::msg::ExternalObjectList & object_list,
743 const MotionModelMapping & motion_model_mapping)
744 -> carma_cooperative_perception_interfaces::msg::DetectionList
745{
746 carma_cooperative_perception_interfaces::msg::DetectionList detection_list;
747
748 std::transform(
749 std::cbegin(object_list.objects), std::cend(object_list.objects),
750 std::back_inserter(detection_list.detections),
751 [&motion_model_mapping = std::as_const(motion_model_mapping)](const auto & object) {
752 return to_detection_msg(object, motion_model_mapping);
753 });
754
755 return detection_list;
756}
757
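// Converts a cooperative-perception Track back into a CARMA ExternalObject: the track id is
// reduced to its numeric digits, the pose is copied, the longitudinal speed is resolved into
// map-frame x/y velocity components using the track's yaw, and the semantic class is mapped to an
// external object type.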
758auto to_external_object_msg(const carma_cooperative_perception_interfaces::msg::Track & track)
759  -> carma_perception_msgs::msg::ExternalObject
760{
761 carma_perception_msgs::msg::ExternalObject external_object;
762 external_object.header = track.header;
763 external_object.presence_vector = 0;
764
765 const auto to_numeric_id = [](std::string string_id) -> std::optional<uint32_t> {
766 auto non_digit_start = std::remove_if(
767 std::begin(string_id), std::end(string_id),
768 [](const auto & ch) { return !std::isdigit(ch); });
769
770 std::uint32_t numeric_id;
771 const auto digit_substr_size{std::distance(std::begin(string_id), non_digit_start)};
772 if (
773 std::from_chars(string_id.c_str(), string_id.c_str() + digit_substr_size, numeric_id).ec ==
774 std::errc{}) {
775 return numeric_id;
776 }
777
778 return std::nullopt;
779 };
780
781 if (const auto numeric_id{to_numeric_id(track.id)}) {
782 external_object.presence_vector |= external_object.ID_PRESENCE_VECTOR;
783 external_object.id = numeric_id.value();
784 } else {
785 external_object.presence_vector &= ~external_object.ID_PRESENCE_VECTOR;
786 }
787
788 external_object.presence_vector |= external_object.POSE_PRESENCE_VECTOR;
789 external_object.pose = track.pose;
790
791 external_object.presence_vector |= external_object.VELOCITY_PRESENCE_VECTOR;
792
793 const auto track_longitudinal_velocity{track.twist.twist.linear.x};
794 const auto track_orientation = track.pose.pose.orientation;
795
796 tf2::Quaternion q(
797 track_orientation.x, track_orientation.y, track_orientation.z, track_orientation.w);
798 tf2::Matrix3x3 m(q);
799 double roll, pitch, yaw;
800 m.getRPY(roll, pitch, yaw);
801
802 external_object.velocity.twist.linear.x = track_longitudinal_velocity * std::cos(yaw);
803 external_object.velocity.twist.linear.y = track_longitudinal_velocity * std::sin(yaw);
804
805 external_object.object_type = track.semantic_class;
806
807 external_object.presence_vector |= external_object.OBJECT_TYPE_PRESENCE_VECTOR;
808 switch (track.semantic_class) {
809 case track.SEMANTIC_CLASS_SMALL_VEHICLE:
810 external_object.object_type = external_object.SMALL_VEHICLE;
811 break;
812 case track.SEMANTIC_CLASS_LARGE_VEHICLE:
813 external_object.object_type = external_object.LARGE_VEHICLE;
814 break;
815 case track.SEMANTIC_CLASS_MOTORCYCLE:
816 external_object.object_type = external_object.MOTORCYCLE;
817 break;
818 case track.SEMANTIC_CLASS_PEDESTRIAN:
819 external_object.object_type = external_object.PEDESTRIAN;
820 break;
821 case track.SEMANTIC_CLASS_UNKNOWN:
822 default:
823 external_object.object_type = external_object.UNKNOWN;
824 }
825
826 return external_object;
827}
828
829auto to_external_object_list_msg(
830  const carma_cooperative_perception_interfaces::msg::TrackList & track_list)
831 -> carma_perception_msgs::msg::ExternalObjectList
832{
833 carma_perception_msgs::msg::ExternalObjectList external_object_list;
834
835 for (const auto & track : track_list.tracks) {
836 external_object_list.objects.push_back(to_external_object_msg(track));
837 }
838
839 return external_object_list;
840}
841
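// Builds an SDSM from an ExternalObjectList: the list stamp becomes the SDSM timestamp, the host
// pose is converted to a WGS-84 reference position, and each external object is converted to
// detected object data with a measurement time offset and a position offset relative to that pose.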
842auto to_sdsm_msg(
843  const carma_perception_msgs::msg::ExternalObjectList & external_object_list,
844 const geometry_msgs::msg::PoseStamped & current_pose,
845 const std::shared_ptr<lanelet::projection::LocalFrameProjector> & map_projection)
846 -> carma_v2x_msgs::msg::SensorDataSharingMessage
847{
848 carma_v2x_msgs::msg::SensorDataSharingMessage sdsm;
849 carma_v2x_msgs::msg::DetectedObjectList detected_object_list;
850
851 sdsm.sdsm_time_stamp = to_ddate_time_msg(external_object_list.header.stamp);
852
853 sdsm.ref_pos = transform_pose_from_map_to_wgs84(current_pose, map_projection);
854
855 // Convert external objects within the list to detected_object_data
856 for (const auto & external_object : external_object_list.objects) {
857 auto sdsm_detected_object = to_detected_object_data_msg(external_object, map_projection);
858
859 // Calculate the time offset between individual objects and the respective SDSM container msg
860 sdsm_detected_object.detected_object_common_data.measurement_time =
861 calc_sdsm_time_offset(external_object.header.stamp, external_object.header.stamp);
862
863 // Calculate the position offset from the current reference pose (in m)
864 sdsm_detected_object.detected_object_common_data.pos =
865 calc_relative_position(current_pose, sdsm_detected_object.detected_object_common_data.pos);
866
867 detected_object_list.detected_object_data.push_back(sdsm_detected_object);
868 }
869
870 std::vector<uint8_t> id = {0x00, 0x00, 0x00, 0x01};
871 sdsm.source_id.id = id;
872 sdsm.equipment_type.equipment_type = j3224_v2x_msgs::msg::EquipmentType::OBU;
873 sdsm.ref_pos_xy_conf.semi_major = j2735_v2x_msgs::msg::PositionalAccuracy::ACCURACY_UNAVAILABLE;
874 sdsm.ref_pos_xy_conf.semi_minor = j2735_v2x_msgs::msg::PositionalAccuracy::ACCURACY_UNAVAILABLE;
875 sdsm.ref_pos_xy_conf.orientation =
876 j2735_v2x_msgs::msg::PositionalAccuracy::ACCURACY_ORIENTATION_UNAVAILABLE;
877 sdsm.objects = detected_object_list;
878
879 return sdsm;
880}
881
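// Converts a single ExternalObject into SDSM detected object data: confidence, id, position
// offset, speed, and heading are filled from the common data, and size/height fields are placed
// in the vehicle, VRU, or obstacle optional data depending on the object type.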
882auto to_detected_object_data_msg(
883  const carma_perception_msgs::msg::ExternalObject & external_object,
884 const std::shared_ptr<lanelet::projection::LocalFrameProjector> & map_projection)
885 -> carma_v2x_msgs::msg::DetectedObjectData
886{
887 carma_v2x_msgs::msg::DetectedObjectData detected_object_data;
888 detected_object_data.presence_vector = 0;
889
890 carma_v2x_msgs::msg::DetectedObjectCommonData detected_object_common_data;
891 detected_object_common_data.presence_vector = 0;
892
893 // common data //////////
894
895  // obj_type_cfd - convert confidence from a fraction to a percentage, cast to the proper uint type
896 if (external_object.presence_vector & external_object.OBJECT_TYPE_PRESENCE_VECTOR) {
897 detected_object_common_data.obj_type_cfd.classification_confidence =
898 static_cast<std::uint8_t>(external_object.confidence * 100);
899 }
900
901  // detected_id - cast to the proper type
902 if (external_object.presence_vector & external_object.ID_PRESENCE_VECTOR) {
903 detected_object_common_data.detected_id.object_id =
904 static_cast<std::uint16_t>(external_object.id);
905 }
906
907 // pos - Add offset to ref_pos to get object position
908 // in map frame -> convert to WGS84 coordinates for sdsm
909
910 // To get offset: Subtract the external object pose from
911 // the current vehicle location given by the current_pose topic
912 if (external_object.presence_vector & external_object.POSE_PRESENCE_VECTOR) {
913 detected_object_common_data.pos.offset_x.object_distance =
914 static_cast<float>(external_object.pose.pose.position.x);
915 detected_object_common_data.pos.offset_y.object_distance =
916 static_cast<float>(external_object.pose.pose.position.y);
917 detected_object_common_data.pos.offset_z.object_distance =
918 static_cast<float>(external_object.pose.pose.position.z);
919 }
920
921  // speed/speed_z - convert the velocity vector to a scalar speed value from its x/y components
922 if (external_object.presence_vector & external_object.VELOCITY_PRESENCE_VECTOR) {
923 detected_object_common_data.speed.speed =
924 std::hypot(external_object.velocity.twist.linear.x, external_object.velocity.twist.linear.y);
925
926 detected_object_common_data.presence_vector |=
927 carma_v2x_msgs::msg::DetectedObjectCommonData::HAS_SPEED_Z;
928 detected_object_common_data.speed_z.speed = external_object.velocity.twist.linear.z;
929
930    // heading - derive the true heading from the object's yaw orientation
931 lanelet::BasicPoint3d external_object_position{
932 external_object.pose.pose.position.x, external_object.pose.pose.position.y,
933 external_object.pose.pose.position.z};
934 // Get yaw from orientation
935 auto obj_orientation = external_object.pose.pose.orientation;
936 tf2::Quaternion q(obj_orientation.x, obj_orientation.y, obj_orientation.z, obj_orientation.w);
937 tf2::Matrix3x3 m(q);
938 double roll, pitch, yaw;
939 m.getRPY(roll, pitch, yaw);
940
941 detected_object_common_data.heading.heading =
942 remove_units(enu_orientation_to_true_heading(yaw, external_object_position, map_projection));
943 }
944
945 // optional data (determine based on object type)
946 // use object type struct for better control
947 carma_v2x_msgs::msg::DetectedObjectOptionalData detected_object_optional_data;
948
949 switch (external_object.object_type) {
950 case external_object.SMALL_VEHICLE:
951 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::VEHICLE;
952
953 if (external_object.presence_vector & external_object.SIZE_PRESENCE_VECTOR) {
954 detected_object_optional_data.det_veh.presence_vector =
955 carma_v2x_msgs::msg::DetectedVehicleData::HAS_SIZE;
956 detected_object_optional_data.det_veh.presence_vector |=
957 carma_v2x_msgs::msg::DetectedVehicleData::HAS_HEIGHT;
958
959 detected_object_optional_data.det_veh.size.vehicle_width = external_object.size.y;
960 detected_object_optional_data.det_veh.size.vehicle_length = external_object.size.x;
961 detected_object_optional_data.det_veh.height.vehicle_height = external_object.size.z;
962 }
963 break;
964 case external_object.LARGE_VEHICLE:
965 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::VEHICLE;
966
967 if (external_object.presence_vector & external_object.SIZE_PRESENCE_VECTOR) {
968 detected_object_optional_data.det_veh.presence_vector =
969 carma_v2x_msgs::msg::DetectedVehicleData::HAS_SIZE;
970 detected_object_optional_data.det_veh.presence_vector |=
971 carma_v2x_msgs::msg::DetectedVehicleData::HAS_HEIGHT;
972
973 detected_object_optional_data.det_veh.size.vehicle_width = external_object.size.y;
974 detected_object_optional_data.det_veh.size.vehicle_length = external_object.size.x;
975 detected_object_optional_data.det_veh.height.vehicle_height = external_object.size.z;
976 }
977 break;
978 case external_object.MOTORCYCLE:
979 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::VEHICLE;
980
981 if (external_object.presence_vector & external_object.SIZE_PRESENCE_VECTOR) {
982 detected_object_optional_data.det_veh.presence_vector =
983 carma_v2x_msgs::msg::DetectedVehicleData::HAS_SIZE;
984 detected_object_optional_data.det_veh.presence_vector |=
985 carma_v2x_msgs::msg::DetectedVehicleData::HAS_HEIGHT;
986
987 detected_object_optional_data.det_veh.size.vehicle_width = external_object.size.y;
988 detected_object_optional_data.det_veh.size.vehicle_length = external_object.size.x;
989 detected_object_optional_data.det_veh.height.vehicle_height = external_object.size.z;
990 }
991 break;
992 case external_object.PEDESTRIAN:
993 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::VRU;
994
995 detected_object_optional_data.det_vru.presence_vector =
996 carma_v2x_msgs::msg::DetectedVRUData::HAS_BASIC_TYPE;
997 detected_object_optional_data.det_vru.basic_type.type |=
998 j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PEDESTRIAN;
999
1000 break;
1001 case external_object.UNKNOWN:
1002 default:
1003 detected_object_common_data.obj_type.object_type = j3224_v2x_msgs::msg::ObjectType::UNKNOWN;
1004
1005 if (external_object.presence_vector & external_object.SIZE_PRESENCE_VECTOR) {
1006 detected_object_optional_data.det_obst.obst_size.width.size_value = external_object.size.y;
1007 detected_object_optional_data.det_obst.obst_size.length.size_value = external_object.size.x;
1008
1009 detected_object_optional_data.det_obst.obst_size.presence_vector =
1010 carma_v2x_msgs::msg::ObstacleSize::HAS_HEIGHT;
1011 detected_object_optional_data.det_obst.obst_size.height.size_value = external_object.size.z;
1012 }
1013 }
1014
1015 detected_object_data.detected_object_common_data = std::move(detected_object_common_data);
1016 detected_object_data.detected_object_optional_data = std::move(detected_object_optional_data);
1017
1018 return detected_object_data;
1019}
1020
1021} // namespace carma_cooperative_perception