Carma-platform v4.2.0
CARMA Platform is built on robot operating system (ROS) and utilizes open source software (OSS) that enables Cooperative Driving Automation (CDA) features to allow Automated Driving Systems to interact and cooperate with infrastructure and other vehicles through communication.
mobility_path_to_external_object.cpp
Go to the documentation of this file.
1// Copyright 2019-2023 Leidos
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
#include <tf2/LinearMath/Transform.h>

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <functional>
#include <string>
#include <utility>

#include <rclcpp/logger.hpp>
#include <rclcpp/logging.hpp>

#include <carma_perception_msgs/msg/external_object.hpp>
#include <carma_v2x_msgs/msg/mobility_path.hpp>
27namespace motion_computation
28{
29namespace conversion
30{
32 const carma_v2x_msgs::msg::MobilityPath & in_msg,
33 carma_perception_msgs::msg::ExternalObject & out_msg,
34 const lanelet::projection::LocalFrameProjector & map_projector)
35{
36 constexpr double mobility_path_points_timestep_size =
37 0.1; // Mobility path timestep size per message spec
38
39 out_msg.size.x = 2.5; // TODO(carma) identify better approach for object size in mobility path
40 out_msg.size.y = 2.25;
41 out_msg.size.z = 2.0;
42
43 // get reference origin in ECEF (convert from cm to m)
44 double ecef_x = static_cast<double>(in_msg.trajectory.location.ecef_x) / 100.0;
45 double ecef_y = static_cast<double>(in_msg.trajectory.location.ecef_y) / 100.0;
46 double ecef_z = static_cast<double>(in_msg.trajectory.location.ecef_z) / 100.0;
47
48 // Convert general information
49 // clang-off
50 out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::ID_PRESENCE_VECTOR;
51 out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::POSE_PRESENCE_VECTOR;
52 out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::VELOCITY_PRESENCE_VECTOR;
53 out_msg.presence_vector |=
54 carma_perception_msgs::msg::ExternalObject::OBJECT_TYPE_PRESENCE_VECTOR;
55 out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::BSM_ID_PRESENCE_VECTOR;
56 out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::DYNAMIC_OBJ_PRESENCE;
57 out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::PREDICTION_PRESENCE_VECTOR;
58 // clang-on
59
60 out_msg.object_type = carma_perception_msgs::msg::ExternalObject::SMALL_VEHICLE;
61 std::hash<std::string> hasher;
62
63 // TODO(carma) hasher returns size_t, message accept uint32_t which we might lose info
64 auto hashed = hasher(in_msg.m_header.sender_id);
65 out_msg.id = static_cast<uint32_t>(hashed);
66
67 // convert hex std::string to uint8_t array
68 for (size_t i = 0; i < in_msg.m_header.sender_bsm_id.size(); i += 2) {
69 unsigned int num = 0;
70 sscanf(in_msg.m_header.sender_bsm_id.substr(i, i + 2).c_str(), "%x", &num);
71 out_msg.bsm_id.push_back((uint8_t)num);
72 }
73
74 // first point's timestamp
75 out_msg.header.stamp =
76 rclcpp::Time((uint64_t)in_msg.m_header.timestamp * 1e6); // ms to nanoseconds
77
78 // If it is a static object, we finished processing
79 if (in_msg.trajectory.offsets.size() < 2) {
80 out_msg.dynamic_obj = false;
81 return;
82 }
83 out_msg.dynamic_obj = true;
84
85 // get planned trajectory points
86 carma_perception_msgs::msg::PredictedState prev_state;
87 tf2::Vector3 prev_pt_ecef{ecef_x, ecef_y, ecef_z};
88
89 auto prev_pt_map = impl::transform_to_map_frame(prev_pt_ecef, map_projector);
90 double prev_yaw = 0.0;
91
92 double message_offset_x = 0.0; // units cm
93 double message_offset_y = 0.0;
94 double message_offset_z = 0.0;
95
96 rclcpp::Duration mobility_path_point_delta_t =
97 rclcpp::Duration::from_nanoseconds(mobility_path_points_timestep_size * 1e9);
98
99 // Note the usage of current vs previous in this loop can be a bit confusing
100 // The intended behavior is we our always storing our prev_point but using
101 // curr_pt for computing velocity at prev_point
102 for (size_t i = 0; i < in_msg.trajectory.offsets.size(); i++) {
103 auto curr_pt_msg = in_msg.trajectory.offsets[i];
104
105 message_offset_x = static_cast<double>(curr_pt_msg.offset_x) + message_offset_x;
106 message_offset_y = static_cast<double>(curr_pt_msg.offset_y) + message_offset_y;
107 message_offset_z = static_cast<double>(curr_pt_msg.offset_z) + message_offset_z;
108
109 tf2::Vector3 curr_pt_ecef{
110 ecef_x + message_offset_x / 100.0, ecef_y + message_offset_y / 100.0,
111 ecef_z + message_offset_z /
112 100.0}; // ecef_x is in m while message_offset_x is in cm. Want m as final result
113 auto curr_pt_map = impl::transform_to_map_frame(curr_pt_ecef, map_projector);
114
115 carma_perception_msgs::msg::PredictedState curr_state;
116
117 if (i == 0) // First point's state should be stored outside "predictions"
118 {
119 rclcpp::Time prev_stamp_as_time = rclcpp::Time(out_msg.header.stamp);
120 rclcpp::Time updated_time_step = prev_stamp_as_time + mobility_path_point_delta_t;
121
123 curr_pt_map, prev_pt_map, prev_stamp_as_time, updated_time_step,
124 prev_yaw); // Position returned is that of prev_pt_map NOT curr_pt_map
125 curr_state = std::get<0>(res);
126 prev_yaw = std::get<1>(res);
127 // Compute out_msg pose
128 out_msg.pose.pose = curr_state.predicted_position; // Orientation computed from first point
129 // in offsets with location
130 out_msg.velocity.twist = curr_state.predicted_velocity; // Velocity derived from first point
131
132 } else {
133 // NOTE: This is where the time increment happens but it feels incorrect.
134 // since the corresponding data is actually from the original prev_state stamp
135 rclcpp::Time prev_stamp_as_time =
136 rclcpp::Time(prev_state.header.stamp) + mobility_path_point_delta_t;
137 rclcpp::Time updated_time_step = prev_stamp_as_time + mobility_path_point_delta_t;
138
140 curr_pt_map, prev_pt_map, prev_stamp_as_time, updated_time_step, prev_yaw);
141 curr_state = std::get<0>(res);
142 prev_yaw = std::get<1>(res);
143 out_msg.predictions.push_back(curr_state);
144 }
145
146 if (
147 i == in_msg.trajectory.offsets.size() -
148 1) // if last point, copy the prev_state velocity & orientation to the last point too
149 {
150 curr_state.predicted_position.position.x = curr_pt_map.x();
151 curr_state.predicted_position.position.y = curr_pt_map.y();
152 curr_state.predicted_position.position.z = curr_pt_map.z();
153 out_msg.predictions.push_back(curr_state);
154 }
155
156 prev_state = curr_state;
157 prev_pt_map = curr_pt_map;
158 }
159
161
162 return;
163}
164
165namespace impl
166{
167std::pair<carma_perception_msgs::msg::PredictedState, double> composePredictedState(
168 const tf2::Vector3 & curr_pt, const tf2::Vector3 & prev_pt, const rclcpp::Time & prev_time_stamp,
169 const rclcpp::Time & curr_time_stamp, double prev_yaw)
170{
171 carma_perception_msgs::msg::PredictedState output_state;
172 // Set Position
173 output_state.predicted_position.position.x = prev_pt.x();
174 output_state.predicted_position.position.y = prev_pt.y();
175 output_state.predicted_position.position.z = prev_pt.z();
176
177 // Set Orientation
178 Eigen::Vector2d vehicle_vector = {curr_pt.x() - prev_pt.x(), curr_pt.y() - prev_pt.y()};
179 Eigen::Vector2d x_axis = {1, 0};
180 double yaw = 0.0;
181 if (
182 vehicle_vector.norm() <
183 0.000001) { // If there is zero magnitude use previous yaw to avoid divide by 0
184 yaw = prev_yaw;
185 RCLCPP_DEBUG_STREAM(
186 rclcpp::get_logger("motion_computation::conversion"),
187 "Two identical points sent for predicting heading. Forced to use previous yaw or 0 if "
188 "first "
189 "point");
190 } else {
191 yaw = std::acos(vehicle_vector.dot(x_axis) / (vehicle_vector.norm() * x_axis.norm()));
192 }
193
194 tf2::Quaternion vehicle_orientation;
195 vehicle_orientation.setRPY(0, 0, yaw);
196 output_state.predicted_position.orientation.x = vehicle_orientation.getX();
197 output_state.predicted_position.orientation.y = vehicle_orientation.getY();
198 output_state.predicted_position.orientation.z = vehicle_orientation.getZ();
199 output_state.predicted_position.orientation.w = vehicle_orientation.getW();
200
201 // Set velocity
202 output_state.predicted_velocity.linear.x =
203
204 vehicle_vector.norm() / (curr_time_stamp - prev_time_stamp).seconds();
205
206 // Set timestamp
207 output_state.header.stamp = builtin_interfaces::msg::Time(prev_time_stamp);
208
209 return std::make_pair(output_state, yaw);
210}
211
212void calculateAngVelocityOfPredictedStates(carma_perception_msgs::msg::ExternalObject & object)
213{
214 if (!object.dynamic_obj || object.predictions.size() == 0) {
215 return;
216 }
217
218 // Object's current angular velocity
219 double dt = (rclcpp::Time(object.header.stamp) - rclcpp::Time(object.predictions[0].header.stamp))
220 .seconds();
221 object.velocity.twist.angular.z =
222 (impl::getYawFromQuaternionMsg(object.pose.pose.orientation) -
223 impl::getYawFromQuaternionMsg(object.predictions[0].predicted_position.orientation)) /
224 dt;
225
226 // Predictions' angular velocities
227 auto prev_orient = object.pose.pose.orientation;
228 auto prev_time = rclcpp::Time(object.predictions[0].header.stamp);
229 for (auto & pred : object.predictions) {
230 dt = (rclcpp::Time(pred.header.stamp) - prev_time).seconds();
231 pred.predicted_velocity.angular.z =
232 (impl::getYawFromQuaternionMsg(prev_orient) -
233 impl::getYawFromQuaternionMsg(pred.predicted_position.orientation)) /
234 dt;
235 }
236}
237
238double getYawFromQuaternionMsg(const geometry_msgs::msg::Quaternion & quaternion)
239{
240 tf2::Quaternion orientation;
241 orientation.setX(quaternion.x);
242 orientation.setY(quaternion.y);
243 orientation.setZ(quaternion.z);
244 orientation.setW(quaternion.w);
245
246 double roll;
247 double pitch;
248 double yaw;
249 tf2::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
250
251 return yaw;
252}
253
255 const tf2::Vector3 & ecef_point, const lanelet::projection::LocalFrameProjector & map_projector)
256{
257 lanelet::BasicPoint3d map_point = map_projector.projectECEF(
258 {ecef_point.x(), ecef_point.y(), ecef_point.z()},
259 -1); // Input should already be converted to m
260
261 return tf2::Vector3(map_point.x(), map_point.y(), map_point.z());
262}
263} // namespace impl
264
265} // namespace conversion
266
267} // namespace motion_computation
double getYawFromQuaternionMsg(const geometry_msgs::msg::Quaternion &quaternion)
Gets the yaw angle in radians described by the provided quaternion.
tf2::Vector3 transform_to_map_frame(const tf2::Vector3 &ecef_point, const lanelet::projection::LocalFrameProjector &map_projector)
Transforms ecef point to map frame using internally saved map transform.
void calculateAngVelocityOfPredictedStates(carma_perception_msgs::msg::ExternalObject &object)
Helper function to fill in the angular velocity of the external object.
std::pair< carma_perception_msgs::msg::PredictedState, double > composePredictedState(const tf2::Vector3 &curr_pt, const tf2::Vector3 &prev_pt, const rclcpp::Time &prev_time_stamp, const rclcpp::Time &curr_time_stamp, double prev_yaw)
Composes a PredictedState message from a provided current point and previous point....
void convert(const carma_v2x_msgs::msg::PSM &in_msg, carma_perception_msgs::msg::ExternalObject &out_msg, const std::string &map_frame_id, double pred_period, double pred_step_size, const lanelet::projection::LocalFrameProjector &map_projector, const tf2::Quaternion &ned_in_map_rotation, rclcpp::node_interfaces::NodeClockInterface::SharedPtr node_clock)