#include <algorithm>
#include <map>
#include <memory>
#include <optional>
#include <set>
#include <sstream>
#include <string>
#include <vector>

#include "Eigen/Dense"
#include "opencv2/aruco.hpp"
#include "opencv2/calib3d.hpp"
#include "opencv2/core/eigen.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"

#include "absl/strings/str_format.h"

#include "aos/configuration.h"
#include "aos/events/logging/log_reader.h"
#include "aos/events/simulated_event_loop.h"
#include "aos/init.h"
#include "aos/util/mcap_logger.h"
#include "frc971/constants/constants_sender_lib.h"
#include "frc971/control_loops/pose.h"
#include "frc971/vision/calibration_generated.h"
#include "frc971/vision/charuco_lib.h"
#include "frc971/vision/target_mapper.h"
#include "frc971/vision/vision_util_lib.h"
#include "frc971/vision/visualize_robot.h"
#include "y2024/constants/simulated_constants_sender.h"
#include "y2024/vision/vision_util.h"

DEFINE_string(config, "",
              "If set, override the log's config file with this one.");
DEFINE_string(constants_path, "y2024/constants/constants.json",
              "Path to the constants file");
DEFINE_string(dump_constraints_to, "/tmp/mapping_constraints.txt",
              "Write the target constraints to this path");
DEFINE_string(dump_stats_to, "/tmp/mapping_stats.txt",
              "Write the mapping stats to this path");
DEFINE_string(field_name, "charged_up",
              "Field name, for the output json filename and flatbuffer field");
DEFINE_string(json_path, "y2024/vision/maps/target_map.json",
              "Specify path for json with initial pose guesses.");
DEFINE_double(max_pose_error, 1e-6,
              "Throw out target poses with a higher pose error than this");
DEFINE_double(
    max_pose_error_ratio, 0.4,
    "Throw out target poses with a higher pose error ratio than this");
DEFINE_string(mcap_output_path, "", "Path to write the mcap log to.");
DEFINE_string(output_dir, "y2024/vision/maps",
              "Directory to write solved target map to");
DEFINE_double(pause_on_distance, 2.0,
              "Pause if two consecutive implied robot positions differ by more "
              "than this many meters");
DEFINE_string(orin, "orin1",
              "Orin name to generate mcap log for; defaults to orin1.");
DEFINE_uint64(skip_to, 1,
              "Start at combined image of this number (1 is the first image)");
DEFINE_bool(solve, true, "Whether to solve for the field's target map.");
DEFINE_int32(team_number, 0,
             "Required: Use the calibration for a node with this team number");
DEFINE_uint64(wait_key, 1,
              "Time in ms to wait between images, if no click (0 to wait "
              "indefinitely until click).");
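// Example invocation (binary name and log path are illustrative):
//   target_mapping --team_number=971 --solve /path/to/logfile
// Add --visualize_solver to step through the accumulated detections
// frame by frame before solving.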

DECLARE_int32(frozen_target_id);
DECLARE_int32(min_target_id);
DECLARE_int32(max_target_id);
DECLARE_bool(visualize_solver);

namespace y2024::vision {
using frc971::vision::DataAdapter;
using frc971::vision::ImageCallback;
using frc971::vision::PoseUtils;
using frc971::vision::TargetMap;
using frc971::vision::TargetMapper;
using frc971::vision::VisualizeRobot;
namespace calibration = frc971::vision::calibration;

// Class to handle reading target poses from a replayed log, displaying
// various debug info, and passing the poses to
// frc971::vision::TargetMapper for field mapping.
class TargetMapperReplay {
 public:
  explicit TargetMapperReplay(aos::logger::LogReader *reader);

  // Solves for the target poses with the accumulated detections if
  // FLAGS_solve is set.
  void MaybeSolve();

 private:
  static constexpr int kImageWidth = 1280;
  // Map from camera channel name to color for drawing
  static const std::map<std::string, cv::Scalar> kOrinColors;
  // Contains fixed target poses without solving, for use with visualization
  static const TargetMapper kFixedTargetMapper;

  // Change reference frame from camera to robot
  static Eigen::Affine3d CameraToRobotDetection(Eigen::Affine3d H_camera_target,
                                                Eigen::Affine3d extrinsics);
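  // (Convention: H_a_b maps coordinates expressed in frame b into frame a,
  // so transforms compose as H_a_c = H_a_b * H_b_c.)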

  // Adds april tag detections into the detection list, and handles
  // visualization
  void HandleAprilTags(const TargetMap &map,
                       aos::distributed_clock::time_point node_distributed_time,
                       std::string camera_name, Eigen::Affine3d extrinsics);
  // Gets images from the given node and passes apriltag detections to
  // HandleAprilTags()
  void HandleNodeCaptures(
      aos::EventLoop *mapping_event_loop,
      frc971::constants::ConstantsFetcher<y2024::Constants> *constants_fetcher,
      int camera_number);

  aos::logger::LogReader *reader_;
  // April tag detections from all nodes
  std::vector<DataAdapter::TimestampedDetection> timestamped_target_detections_;

  VisualizeRobot vis_robot_;
  // Set of camera names which are currently drawn on the display
  std::set<std::string> drawn_nodes_;
  // Number of frames displayed
  size_t display_count_;
  // Last time we drew onto the display image.
  // This is different from when we actually call imshow() to update the
  // display window.
  aos::distributed_clock::time_point last_draw_time_;

  Eigen::Affine3d last_H_world_robot_;
  // Maximum distance between consecutive T_world_robot's in one display
  // frame, used to determine if we need to pause for the user to see this
  // frame clearly
  double max_delta_T_world_robot_;

  std::vector<std::unique_ptr<aos::EventLoop>> mapping_event_loops_;

  std::unique_ptr<aos::EventLoop> mcap_event_loop_;
  std::unique_ptr<aos::McapLogger> relogger_;
};

const auto TargetMapperReplay::kOrinColors = std::map<std::string, cv::Scalar>{
    {"/orin1/camera0", cv::Scalar(255, 0, 255)},
    {"/orin1/camera1", cv::Scalar(255, 255, 0)},
    {"/imu/camera0", cv::Scalar(0, 255, 255)},
    {"/imu/camera1", cv::Scalar(255, 165, 0)},
};
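// Note: the camera channel names built in HandleNodeCaptures must match
// these keys exactly, since kOrinColors.at() throws for unknown names.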

const auto TargetMapperReplay::kFixedTargetMapper =
    TargetMapper(FLAGS_json_path, ceres::examples::VectorOfConstraints{});

Eigen::Affine3d TargetMapperReplay::CameraToRobotDetection(
    Eigen::Affine3d H_camera_target, Eigen::Affine3d extrinsics) {
  const Eigen::Affine3d H_robot_camera = extrinsics;
  const Eigen::Affine3d H_robot_target = H_robot_camera * H_camera_target;
  return H_robot_target;
}

TargetMapperReplay::TargetMapperReplay(aos::logger::LogReader *reader)
    : reader_(reader),
      timestamped_target_detections_(),
      vis_robot_(cv::Size(1280, 1000)),
      drawn_nodes_(),
      display_count_(0),
      last_draw_time_(aos::distributed_clock::min_time),
      last_H_world_robot_(Eigen::Matrix4d::Identity()),
      max_delta_T_world_robot_(0.0) {
  reader_->RemapLoggedChannel("/orin1/constants", "y2024.Constants");
  reader_->RemapLoggedChannel("/imu/constants", "y2024.Constants");
  reader_->Register();

  SendSimulationConstants(reader_->event_loop_factory(), FLAGS_team_number,
                          FLAGS_constants_path);

  std::vector<std::string> node_list;
  node_list.push_back("imu");
  node_list.push_back("orin1");

  for (const std::string &node : node_list) {
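    // Make one event loop per camera on this node. The last two entries in
    // mapping_event_loops_ are therefore always this node's camera0 and
    // camera1 loops.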
    const aos::Node *aos_node =
        aos::configuration::GetNode(reader->configuration(), node);

    mapping_event_loops_.emplace_back(
        reader_->event_loop_factory()->MakeEventLoop(node + "_mapping_camera0",
                                                     aos_node));
    mapping_event_loops_.emplace_back(
        reader_->event_loop_factory()->MakeEventLoop(node + "_mapping_camera1",
                                                     aos_node));
    frc971::constants::ConstantsFetcher<y2024::Constants> constants_fetcher(
        mapping_event_loops_[mapping_event_loops_.size() - 1].get());
    HandleNodeCaptures(
        mapping_event_loops_[mapping_event_loops_.size() - 2].get(),
        &constants_fetcher, 0);
    HandleNodeCaptures(
        mapping_event_loops_[mapping_event_loops_.size() - 1].get(),
        &constants_fetcher, 1);
  }

  if (FLAGS_visualize_solver) {
    vis_robot_.ClearImage();
    const double kFocalLength = 500.0;
    vis_robot_.SetDefaultViewpoint(kImageWidth, kFocalLength);
  }
}

// Add detected apriltag poses relative to the robot to
// timestamped_target_detections_
void TargetMapperReplay::HandleAprilTags(
    const TargetMap &map,
    aos::distributed_clock::time_point node_distributed_time,
    std::string camera_name, Eigen::Affine3d extrinsics) {
  bool drew = false;
  std::stringstream label;
  label << camera_name << " - ";

  for (const auto *target_pose_fbs : *map.target_poses()) {
    // Skip detections with invalid ids
    if (static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) <
            FLAGS_min_target_id ||
        static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) >
            FLAGS_max_target_id) {
      VLOG(1) << "Skipping tag with invalid id of " << target_pose_fbs->id();
      continue;
    }

    // Skip detections with high pose errors
    if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
      VLOG(1) << "Skipping tag " << target_pose_fbs->id()
              << " due to pose error of " << target_pose_fbs->pose_error();
      continue;
    }
    // Skip detections with high pose error ratios
    if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
      VLOG(1) << "Skipping tag " << target_pose_fbs->id()
              << " due to pose error ratio of "
              << target_pose_fbs->pose_error_ratio();
      continue;
    }

    const TargetMapper::TargetPose target_pose =
        PoseUtils::TargetPoseFromFbs(*target_pose_fbs);

    Eigen::Affine3d H_camera_target =
        Eigen::Translation3d(target_pose.pose.p) * target_pose.pose.q;
    Eigen::Affine3d H_robot_target =
        CameraToRobotDetection(H_camera_target, extrinsics);

    ceres::examples::Pose3d target_pose_camera =
        PoseUtils::Affine3dToPose3d(H_camera_target);
    double distance_from_camera = target_pose_camera.p.norm();
    double distortion_factor = target_pose_fbs->distortion_factor();

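    // Ignore detections more than 5 meters from the camera, presumably
    // because long-range pose estimates are too noisy to help the map.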
    if (distance_from_camera > 5.0) {
      continue;
    }

    CHECK(map.has_monotonic_timestamp_ns())
        << "Need detection timestamps for mapping";

    timestamped_target_detections_.emplace_back(
        DataAdapter::TimestampedDetection{
            .time = node_distributed_time,
            .H_robot_target = H_robot_target,
            .distance_from_camera = distance_from_camera,
            .distortion_factor = distortion_factor,
            .id = static_cast<TargetMapper::TargetId>(target_pose.id)});

    if (FLAGS_visualize_solver) {
      // If we've already drawn this camera_name in the current image,
      // display the image before clearing and adding the new poses
      if (drawn_nodes_.count(camera_name) != 0) {
        display_count_++;
        cv::putText(vis_robot_.image_,
                    "Poses #" + std::to_string(display_count_),
                    cv::Point(600, 10), cv::FONT_HERSHEY_PLAIN, 1.0,
                    cv::Scalar(255, 255, 255));

        if (display_count_ >= FLAGS_skip_to) {
          VLOG(1) << "Showing image for node " << camera_name
                  << " since we've drawn it already";
          cv::imshow("View", vis_robot_.image_);
          // Pause if delta_T is too large, but only after the first image (to
          // make sure the deltas are correct).
          if (max_delta_T_world_robot_ > FLAGS_pause_on_distance &&
              display_count_ > 1) {
            LOG(INFO) << "Pausing since the delta between robot estimates is "
                      << max_delta_T_world_robot_ << " which is > threshold of "
                      << FLAGS_pause_on_distance;
            cv::waitKey(0);
          } else {
            cv::waitKey(FLAGS_wait_key);
          }
          max_delta_T_world_robot_ = 0.0;
        } else {
          VLOG(1) << "At poses #" << std::to_string(display_count_);
        }
        vis_robot_.ClearImage();
        drawn_nodes_.clear();
      }

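      // Infer the robot's world pose from this detection by combining the
      // fixed map's world-to-target transform with the measured
      // robot-to-target transform.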
      Eigen::Affine3d H_world_target = PoseUtils::Pose3dToAffine3d(
          kFixedTargetMapper.GetTargetPoseById(target_pose_fbs->id())->pose);
      Eigen::Affine3d H_world_robot = H_world_target * H_robot_target.inverse();
      VLOG(2) << camera_name << ", id " << target_pose_fbs->id()
              << ", t = " << node_distributed_time
              << ", pose_error = " << target_pose_fbs->pose_error()
              << ", pose_error_ratio = " << target_pose_fbs->pose_error_ratio()
              << ", robot_pos (x,y,z) = "
              << H_world_robot.translation().transpose();

      label << "id " << target_pose_fbs->id() << ": err (% of max): "
            << (target_pose_fbs->pose_error() / FLAGS_max_pose_error)
            << " err_ratio: " << target_pose_fbs->pose_error_ratio() << " ";

      vis_robot_.DrawRobotOutline(H_world_robot, camera_name,
                                  kOrinColors.at(camera_name));
      vis_robot_.DrawFrameAxes(H_world_target,
                               std::to_string(target_pose_fbs->id()),
                               kOrinColors.at(camera_name));

      double delta_T_world_robot =
          (H_world_robot.translation() - last_H_world_robot_.translation())
              .norm();
      max_delta_T_world_robot_ =
          std::max(delta_T_world_robot, max_delta_T_world_robot_);

      VLOG(1) << "Drew in info for robot " << camera_name << " and target #"
              << target_pose_fbs->id();
      drew = true;
      last_draw_time_ = node_distributed_time;
      last_H_world_robot_ = H_world_robot;
    }
  }
  if (FLAGS_visualize_solver) {
    if (drew) {
      // Collect all the labels from a given camera, and add the text
      size_t camera_number =
          static_cast<size_t>(camera_name[camera_name.size() - 1] - '0');
      cv::putText(vis_robot_.image_, label.str(),
                  cv::Point(10, 10 + 20 * camera_number),
                  cv::FONT_HERSHEY_PLAIN, 1.0, kOrinColors.at(camera_name));

      drawn_nodes_.emplace(camera_name);
    } else if (node_distributed_time - last_draw_time_ >
                   std::chrono::milliseconds(30) &&
               display_count_ >= FLAGS_skip_to) {
      cv::putText(vis_robot_.image_, "No detections", cv::Point(10, 0),
                  cv::FONT_HERSHEY_PLAIN, 1.0, kOrinColors.at(camera_name));
      // Display and clear the image if we haven't drawn in a while
      VLOG(1) << "Displaying image due to time lapse";
      cv::imshow("View", vis_robot_.image_);
      cv::waitKey(FLAGS_wait_key);
      vis_robot_.ClearImage();
      max_delta_T_world_robot_ = 0.0;
      drawn_nodes_.clear();
    }
  }
}

void TargetMapperReplay::HandleNodeCaptures(
    aos::EventLoop *mapping_event_loop,
    frc971::constants::ConstantsFetcher<y2024::Constants> *constants_fetcher,
    int camera_number) {
  // Get the camera extrinsics
  const auto *calibration = FindCameraCalibration(
      constants_fetcher->constants(),
      mapping_event_loop->node()->name()->string_view(), camera_number);
  cv::Mat extrinsics_cv = frc971::vision::CameraExtrinsics(calibration).value();
  Eigen::Matrix4d extrinsics_matrix;
  cv::cv2eigen(extrinsics_cv, extrinsics_matrix);
  const auto extrinsics = Eigen::Affine3d(extrinsics_matrix);
  std::string camera_name = absl::StrFormat(
      "/%s/camera%d", mapping_event_loop->node()->name()->str(), camera_number);

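  // Watch for apriltag detections (TargetMap messages) on this camera's
  // channel, converting each one's monotonic capture time to the distributed
  // clock before handing it to HandleAprilTags().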
  mapping_event_loop->MakeWatcher(
      camera_name.c_str(), [this, mapping_event_loop, extrinsics,
                            camera_name](const TargetMap &map) {
        aos::distributed_clock::time_point node_distributed_time =
            reader_->event_loop_factory()
                ->GetNodeEventLoopFactory(mapping_event_loop->node())
                ->ToDistributedClock(aos::monotonic_clock::time_point(
                    aos::monotonic_clock::duration(
                        map.monotonic_timestamp_ns())));

        HandleAprilTags(map, node_distributed_time, camera_name, extrinsics);
      });
}

void TargetMapperReplay::MaybeSolve() {
  if (FLAGS_solve) {
    auto target_constraints =
        DataAdapter::MatchTargetDetections(timestamped_target_detections_);

    // Remove constraints between the two sides of the field - these are
    // basically garbage because of how far away the camera is. We will use
    // seeding below to connect the two sides.
    target_constraints.erase(
        std::remove_if(target_constraints.begin(), target_constraints.end(),
                       [](const auto &constraint) {
                         // TODO(james): This no longer makes sense.
                         constexpr TargetMapper::TargetId kMaxRedId = 4;
                         TargetMapper::TargetId min_id =
                             std::min(constraint.id_begin, constraint.id_end);
                         TargetMapper::TargetId max_id =
                             std::max(constraint.id_begin, constraint.id_end);
                         return (min_id <= kMaxRedId && max_id > kMaxRedId);
                       }),
        target_constraints.end());

    LOG(INFO) << "Solving for locations of tags with "
              << target_constraints.size() << " constraints";
    TargetMapper mapper(FLAGS_json_path, target_constraints);
    mapper.Solve(FLAGS_field_name, FLAGS_output_dir);

    if (!FLAGS_dump_constraints_to.empty()) {
      mapper.DumpConstraints(FLAGS_dump_constraints_to);
    }
    if (!FLAGS_dump_stats_to.empty()) {
      mapper.DumpStats(FLAGS_dump_stats_to);
    }
  }
}

void MappingMain(int argc, char *argv[]) {
  std::optional<aos::FlatbufferDetachedBuffer<aos::Configuration>> config =
      (FLAGS_config.empty()
           ? std::nullopt
           : std::make_optional(aos::configuration::ReadConfig(FLAGS_config)));

  // Open the log files passed on the command line
  aos::logger::LogReader reader(
      aos::logger::SortParts(aos::logger::FindLogs(argc, argv)),
      config.has_value() ? &config->message() : nullptr);

  TargetMapperReplay mapper_replay(&reader);
  reader.event_loop_factory()->Run();
  mapper_replay.MaybeSolve();
}

}  // namespace y2024::vision

int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);
  y2024::vision::MappingMain(argc, argv);
}