Carma-platform v4.2.0
CARMA Platform is built on the Robot Operating System (ROS) and utilizes open-source software (OSS) to enable Cooperative Driving Automation (CDA) features that allow Automated Driving Systems to interact and cooperate with infrastructure and other vehicles through communication.
mobility_path_to_external_object.cpp
// Copyright 2019-2023 Leidos
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <tf2/LinearMath/Transform.h>

#include <string>
#include <utility>

#include <motion_computation/message_conversions.hpp>  // assumed project header declaring convert() and the impl:: helpers
#include <rclcpp/logger.hpp>
#include <rclcpp/logging.hpp>

#include <carma_perception_msgs/msg/external_object.hpp>
#include <carma_v2x_msgs/msg/mobility_path.hpp>

namespace motion_computation
{
namespace conversion
{
void convert(
  const carma_v2x_msgs::msg::MobilityPath & in_msg,
  carma_perception_msgs::msg::ExternalObject & out_msg,
  const lanelet::projection::LocalFrameProjector & map_projector)
{
  constexpr double mobility_path_points_timestep_size =
    0.1;  // Mobility path timestep size per message spec

  out_msg.size.x = 2.5;  // TODO(carma) identify better approach for object size in mobility path
  out_msg.size.y = 2.25;
  out_msg.size.z = 2.0;

  // Get the reference origin in ECEF (convert from cm to m)
  double ecef_x = static_cast<double>(in_msg.trajectory.location.ecef_x) / 100.0;
  double ecef_y = static_cast<double>(in_msg.trajectory.location.ecef_y) / 100.0;
  double ecef_z = static_cast<double>(in_msg.trajectory.location.ecef_z) / 100.0;

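  // presence_vector is a bit mask; each flag set below marks the corresponding
  // optional field of the ExternalObject message as populated.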
  // Convert general information
  // clang-format off
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::ID_PRESENCE_VECTOR;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::POSE_PRESENCE_VECTOR;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::VELOCITY_PRESENCE_VECTOR;
  out_msg.presence_vector |=
    carma_perception_msgs::msg::ExternalObject::OBJECT_TYPE_PRESENCE_VECTOR;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::BSM_ID_PRESENCE_VECTOR;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::DYNAMIC_OBJ_PRESENCE;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::PREDICTION_PRESENCE_VECTOR;
  // clang-format on

  out_msg.object_type = carma_perception_msgs::msg::ExternalObject::SMALL_VEHICLE;
  std::hash<std::string> hasher;

  // TODO(carma) hasher returns size_t but the message accepts uint32_t, so we might lose info
  auto hashed = hasher(in_msg.m_header.sender_id);
  out_msg.id = static_cast<uint32_t>(hashed);

  // Convert the hex string sender_bsm_id to a uint8_t array (two hex characters per byte)
  for (size_t i = 0; i < in_msg.m_header.sender_bsm_id.size(); i += 2) {
    unsigned int num = 0;
    sscanf(in_msg.m_header.sender_bsm_id.substr(i, 2).c_str(), "%x", &num);
    out_msg.bsm_id.push_back((uint8_t)num);
  }

  // First point's timestamp
  out_msg.header.stamp =
    rclcpp::Time((uint64_t)in_msg.m_header.timestamp * 1e6);  // ms to nanoseconds

  // If it is a static object (fewer than two trajectory points), we are finished processing
  if (in_msg.trajectory.offsets.size() < 2) {
    out_msg.dynamic_obj = false;
    return;
  }
  out_msg.dynamic_obj = true;

  // Get planned trajectory points
  carma_perception_msgs::msg::PredictedState prev_state;
  tf2::Vector3 prev_pt_ecef{ecef_x, ecef_y, ecef_z};

  auto prev_pt_map = impl::transform_to_map_frame(prev_pt_ecef, map_projector);
  double prev_yaw = 0.0;

  double message_offset_x = 0.0;  // units cm
  double message_offset_y = 0.0;
  double message_offset_z = 0.0;

  // Duration between trajectory points: 0.1 s expressed in nanoseconds
  rclcpp::Duration mobility_path_point_delta_t(mobility_path_points_timestep_size * 1e9);

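  // Each offset in the message is a delta from the previous trajectory point (in cm),
  // so the offsets are accumulated across loop iterations before being added to the ECEF origin.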
  // Note: the usage of current vs previous in this loop can be a bit confusing.
  // The intended behavior is that we are always storing prev_point but using
  // curr_pt for computing the velocity at prev_point.
  for (size_t i = 0; i < in_msg.trajectory.offsets.size(); i++) {
    auto curr_pt_msg = in_msg.trajectory.offsets[i];

    message_offset_x = static_cast<double>(curr_pt_msg.offset_x) + message_offset_x;
    message_offset_y = static_cast<double>(curr_pt_msg.offset_y) + message_offset_y;
    message_offset_z = static_cast<double>(curr_pt_msg.offset_z) + message_offset_z;

    tf2::Vector3 curr_pt_ecef{
      ecef_x + message_offset_x / 100.0, ecef_y + message_offset_y / 100.0,
      ecef_z + message_offset_z / 100.0};  // ecef_x is in m while message_offset_x is in cm. Want m as final result
    auto curr_pt_map = impl::transform_to_map_frame(curr_pt_ecef, map_projector);

    carma_perception_msgs::msg::PredictedState curr_state;

    if (i == 0)  // First point's state should be stored outside "predictions"
    {
      rclcpp::Time prev_stamp_as_time = rclcpp::Time(out_msg.header.stamp);
      rclcpp::Time updated_time_step = prev_stamp_as_time + mobility_path_point_delta_t;

      auto res = impl::composePredictedState(
        curr_pt_map, prev_pt_map, prev_stamp_as_time, updated_time_step,
        prev_yaw);  // Position returned is that of prev_pt_map NOT curr_pt_map
      curr_state = std::get<0>(res);
      prev_yaw = std::get<1>(res);
      // Compute out_msg pose
      out_msg.pose.pose = curr_state.predicted_position;  // Orientation computed from first point
                                                          // in offsets with location
      out_msg.velocity.twist = curr_state.predicted_velocity;  // Velocity derived from first point

    } else {
      // NOTE: This is where the time increment happens, but it feels incorrect
      // since the corresponding data is actually from the original prev_state stamp
      rclcpp::Time prev_stamp_as_time =
        rclcpp::Time(prev_state.header.stamp) + mobility_path_point_delta_t;
      rclcpp::Time updated_time_step = prev_stamp_as_time + mobility_path_point_delta_t;

      auto res = impl::composePredictedState(
        curr_pt_map, prev_pt_map, prev_stamp_as_time, updated_time_step, prev_yaw);
      curr_state = std::get<0>(res);
      prev_yaw = std::get<1>(res);
      out_msg.predictions.push_back(curr_state);
    }

    // If this is the last point, copy the prev_state velocity & orientation to it as well
    if (i == in_msg.trajectory.offsets.size() - 1) {
      curr_state.predicted_position.position.x = curr_pt_map.x();
      curr_state.predicted_position.position.y = curr_pt_map.y();
      curr_state.predicted_position.position.z = curr_pt_map.z();
      out_msg.predictions.push_back(curr_state);
    }

    prev_state = curr_state;
    prev_pt_map = curr_pt_map;
  }

  // Fill in angular velocities for the object and its predicted states
  impl::calculateAngVelocityOfPredictedStates(out_msg);

  return;
}

namespace impl
{
std::pair<carma_perception_msgs::msg::PredictedState, double> composePredictedState(
  const tf2::Vector3 & curr_pt, const tf2::Vector3 & prev_pt, const rclcpp::Time & prev_time_stamp,
  const rclcpp::Time & curr_time_stamp, double prev_yaw)
{
  carma_perception_msgs::msg::PredictedState output_state;
  // Set Position
  output_state.predicted_position.position.x = prev_pt.x();
  output_state.predicted_position.position.y = prev_pt.y();
  output_state.predicted_position.position.z = prev_pt.z();

  // Set Orientation
  Eigen::Vector2d vehicle_vector = {curr_pt.x() - prev_pt.x(), curr_pt.y() - prev_pt.y()};
  Eigen::Vector2d x_axis = {1, 0};
  double yaw = 0.0;
  if (vehicle_vector.norm() < 0.000001) {
    // If the vector has (near) zero magnitude, use the previous yaw to avoid dividing by 0
    yaw = prev_yaw;
    RCLCPP_DEBUG_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Two identical points sent for predicting heading. Forced to use previous yaw or 0 if "
      "first point");
  } else {
    yaw = std::acos(vehicle_vector.dot(x_axis) / (vehicle_vector.norm() * x_axis.norm()));
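    // Note: std::acos() returns only the angle magnitude in [0, pi], so this yaw does not
    // distinguish headings above vs. below the map x-axis (the sign of the y component is lost).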
  }

  tf2::Quaternion vehicle_orientation;
  vehicle_orientation.setRPY(0, 0, yaw);
  output_state.predicted_position.orientation.x = vehicle_orientation.getX();
  output_state.predicted_position.orientation.y = vehicle_orientation.getY();
  output_state.predicted_position.orientation.z = vehicle_orientation.getZ();
  output_state.predicted_position.orientation.w = vehicle_orientation.getW();

  // Set velocity
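  // Linear speed is approximated as the planar (x-y) distance between prev_pt and curr_pt
  // divided by the time between their stamps.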
  output_state.predicted_velocity.linear.x =
    vehicle_vector.norm() / (curr_time_stamp - prev_time_stamp).seconds();

  // Set timestamp
  output_state.header.stamp = builtin_interfaces::msg::Time(prev_time_stamp);

  return std::make_pair(output_state, yaw);
}

void calculateAngVelocityOfPredictedStates(carma_perception_msgs::msg::ExternalObject & object)
{
  if (!object.dynamic_obj || object.predictions.size() == 0) {
    return;
  }

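  // Angular velocity about z is approximated as the difference in yaw between consecutive
  // states divided by the time between their stamps.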
  // Object's current angular velocity
  double dt = (rclcpp::Time(object.header.stamp) - rclcpp::Time(object.predictions[0].header.stamp))
                .seconds();
  object.velocity.twist.angular.z =
    (impl::getYawFromQuaternionMsg(object.pose.pose.orientation) -
     impl::getYawFromQuaternionMsg(object.predictions[0].predicted_position.orientation)) /
    dt;

  // Predictions' angular velocities
  auto prev_orient = object.pose.pose.orientation;
  auto prev_time = rclcpp::Time(object.predictions[0].header.stamp);
  for (auto & pred : object.predictions) {
    dt = (rclcpp::Time(pred.header.stamp) - prev_time).seconds();
    pred.predicted_velocity.angular.z =
      (impl::getYawFromQuaternionMsg(prev_orient) -
       impl::getYawFromQuaternionMsg(pred.predicted_position.orientation)) /
      dt;
  }
}

double getYawFromQuaternionMsg(const geometry_msgs::msg::Quaternion & quaternion)
{
  tf2::Quaternion orientation;
  orientation.setX(quaternion.x);
  orientation.setY(quaternion.y);
  orientation.setZ(quaternion.z);
  orientation.setW(quaternion.w);

  double roll;
  double pitch;
  double yaw;
  tf2::Matrix3x3(orientation).getRPY(roll, pitch, yaw);

  return yaw;
}

tf2::Vector3 transform_to_map_frame(
  const tf2::Vector3 & ecef_point, const lanelet::projection::LocalFrameProjector & map_projector)
{
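  // The second argument is the projection direction; here -1 selects the ECEF -> local
  // map frame direction of the projection.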
  lanelet::BasicPoint3d map_point = map_projector.projectECEF(
    {ecef_point.x(), ecef_point.y(), ecef_point.z()},
    -1);  // Input should already be converted to m

  return tf2::Vector3(map_point.x(), map_point.y(), map_point.z());
}
}  // namespace impl

}  // namespace conversion

}  // namespace motion_computation