Move over to ABSL logging and flags.
Remove gperftools too, since it wants gflags.
Here come the fireworks.
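
The translation is mechanical throughout; a minimal sketch of the pattern,
using the channel flag from apriltag_detector.cc as the example:

    // Before: gflags definition plus direct FLAGS_ access.
    DEFINE_string(channel, "/camera", "Channel name");
    CHECK(FLAGS_channel.length() == 8);

    // After: ABSL flag definition, read through absl::GetFlag().
    #include "absl/flags/flag.h"
    ABSL_FLAG(std::string, channel, "/camera", "Channel name");
    CHECK(absl::GetFlag(FLAGS_channel).length() == 8);

Cross-file declarations change from DECLARE_int32(name) to
ABSL_DECLARE_FLAG(int32_t, name), and glog/logging.h is replaced by
absl/log/log.h and absl/log/check.h, with the matching BUILD deps
(@com_google_absl//absl/flags:flag, //absl/log, //absl/log:check).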
Change-Id: I79cb7bcf60f1047fbfa28bfffc21a0fd692e4b1c
Signed-off-by: Austin Schuh <austin.linux@gmail.com>
diff --git a/y2024/vision/BUILD b/y2024/vision/BUILD
index 500cad6..fb73392 100644
--- a/y2024/vision/BUILD
+++ b/y2024/vision/BUILD
@@ -54,9 +54,10 @@
"//third_party:cudart",
"//third_party/apriltag",
"//y2024/constants:constants_fbs",
- "@com_github_gflags_gflags//:gflags",
- "@com_github_google_glog//:glog",
"@com_github_nvidia_cccl//:cccl",
+ "@com_google_absl//absl/flags:flag",
+ "@com_google_absl//absl/log",
+ "@com_google_absl//absl/log:check",
],
)
diff --git a/y2024/vision/apriltag_detector.cc b/y2024/vision/apriltag_detector.cc
index 5edcf36..998625c 100644
--- a/y2024/vision/apriltag_detector.cc
+++ b/y2024/vision/apriltag_detector.cc
@@ -1,17 +1,20 @@
#include <string>
+#include "absl/flags/flag.h"
+
#include "aos/init.h"
#include "frc971/orin/gpu_apriltag.h"
#include "y2024/constants/constants_generated.h"
#include "y2024/vision/vision_util.h"
-DEFINE_string(channel, "/camera", "Channel name");
-DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
+ABSL_FLAG(std::string, channel, "/camera", "Channel name");
+ABSL_FLAG(std::string, config, "aos_config.json",
+ "Path to the config file to use.");
void GpuApriltagDetector() {
aos::FlatbufferDetachedBuffer<aos::Configuration> config =
- aos::configuration::ReadConfig(FLAGS_config);
+ aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config));
frc971::constants::WaitForConstants<y2024::Constants>(&config.message());
@@ -20,15 +23,15 @@
const frc971::constants::ConstantsFetcher<y2024::Constants> calibration_data(
&event_loop);
- CHECK(FLAGS_channel.length() == 8);
- int camera_id = std::stoi(FLAGS_channel.substr(7, 1));
+ CHECK(absl::GetFlag(FLAGS_channel).length() == 8);
+ int camera_id = std::stoi(absl::GetFlag(FLAGS_channel).substr(7, 1));
const frc971::vision::calibration::CameraCalibration *calibration =
y2024::vision::FindCameraCalibration(
calibration_data.constants(),
event_loop.node()->name()->string_view(), camera_id);
- frc971::apriltag::ApriltagDetector detector(&event_loop, FLAGS_channel,
- calibration);
+ frc971::apriltag::ApriltagDetector detector(
+ &event_loop, absl::GetFlag(FLAGS_channel), calibration);
// TODO(austin): Figure out our core pinning strategy.
// event_loop.SetRuntimeAffinity(aos::MakeCpusetFromCpus({5}));
diff --git a/y2024/vision/calibrate_multi_cameras.cc b/y2024/vision/calibrate_multi_cameras.cc
index 979c0d4..7a9f412 100644
--- a/y2024/vision/calibrate_multi_cameras.cc
+++ b/y2024/vision/calibrate_multi_cameras.cc
@@ -1,5 +1,7 @@
#include <numeric>
+#include "absl/flags/flag.h"
+
#include "aos/configuration.h"
#include "aos/events/logging/log_reader.h"
#include "aos/events/simulated_event_loop.h"
@@ -28,32 +30,31 @@
#include "y2024/constants/simulated_constants_sender.h"
#include "y2024/vision/vision_util.h"
-DEFINE_bool(alt_view, false,
- "If true, show visualization from field level, rather than above");
-DEFINE_string(config, "",
- "If set, override the log's config file with this one.");
-DEFINE_string(constants_path, "y2024/constants/constants.json",
- "Path to the constant file");
-DEFINE_double(max_pose_error, 5e-5,
- "Throw out target poses with a higher pose error than this");
-DEFINE_double(
- max_pose_error_ratio, 0.4,
- "Throw out target poses with a higher pose error ratio than this");
-DEFINE_string(output_folder, "/tmp",
- "Directory in which to store the updated calibration files");
-DEFINE_string(target_type, "charuco_diamond",
- "Type of target being used [aruco, charuco, charuco_diamond]");
-DEFINE_int32(team_number, 0,
- "Required: Use the calibration for a node with this team number");
-DEFINE_uint64(
- wait_key, 1,
+ABSL_FLAG(bool, alt_view, false,
+ "If true, show visualization from field level, rather than above");
+ABSL_FLAG(std::string, config, "",
+ "If set, override the log's config file with this one.");
+ABSL_FLAG(std::string, constants_path, "y2024/constants/constants.json",
+ "Path to the constant file");
+ABSL_FLAG(double, max_pose_error, 5e-5,
+ "Throw out target poses with a higher pose error than this");
+ABSL_FLAG(double, max_pose_error_ratio, 0.4,
+ "Throw out target poses with a higher pose error ratio than this");
+ABSL_FLAG(std::string, output_folder, "/tmp",
+ "Directory in which to store the updated calibration files");
+ABSL_FLAG(std::string, target_type, "charuco_diamond",
+ "Type of target being used [aruco, charuco, charuco_diamond]");
+ABSL_FLAG(int32_t, team_number, 0,
+ "Required: Use the calibration for a node with this team number");
+ABSL_FLAG(
+ uint64_t, wait_key, 1,
"Time in ms to wait between images (0 to wait indefinitely until click)");
-DEFINE_bool(robot, false,
- "If true we're calibrating extrinsics for the robot, use the "
- "correct node path for the robot.");
+ABSL_FLAG(bool, robot, false,
+ "If true we're calibrating extrinsics for the robot, use the "
+ "correct node path for the robot.");
-DECLARE_int32(min_target_id);
-DECLARE_int32(max_target_id);
+ABSL_DECLARE_FLAG(int32_t, min_target_id);
+ABSL_DECLARE_FLAG(int32_t, max_target_id);
// Calibrate extrinsic relationship between cameras using two targets
// seen jointly between cameras. Uses two types of information: 1)
@@ -187,7 +188,7 @@
Eigen::Affine3d H_world_board;
H_world_board = Eigen::Translation3d::Identity() *
Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
- if (FLAGS_alt_view) {
+ if (absl::GetFlag(FLAGS_alt_view)) {
// Don't rotate -- this is like viewing from the side
H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
}
@@ -228,7 +229,7 @@
// Store this observation of the transform between two boards
two_board_extrinsics_list.push_back(boardA_boardB);
- if (FLAGS_visualize) {
+ if (absl::GetFlag(FLAGS_visualize)) {
vis_robot_.DrawFrameAxes(
H_world_board,
std::string("Board ") + std::to_string(target_poses[from_index].id),
@@ -272,7 +273,8 @@
// This bit is just for visualization and checking purposes-- use the
// last two-board observation to figure out the current estimate
// between the two cameras
- if (FLAGS_visualize && two_board_extrinsics_list.size() > 0) {
+ if (absl::GetFlag(FLAGS_visualize) &&
+ two_board_extrinsics_list.size() > 0) {
draw_vis = true;
TimestampedCameraDetection &last_two_board_ext =
two_board_extrinsics_list.back();
@@ -329,18 +331,18 @@
}
}
- if (FLAGS_visualize) {
+ if (absl::GetFlag(FLAGS_visualize)) {
if (!rgb_image.empty()) {
std::string image_name = camera_name + " Image";
cv::Mat rgb_small;
cv::resize(rgb_image, rgb_small, cv::Size(), 0.5, 0.5);
cv::imshow(image_name, rgb_small);
- cv::waitKey(FLAGS_wait_key);
+ cv::waitKey(absl::GetFlag(FLAGS_wait_key));
}
if (draw_vis) {
cv::imshow("Overhead View", vis_robot_.image_);
- cv::waitKey(FLAGS_wait_key);
+ cv::waitKey(absl::GetFlag(FLAGS_wait_key));
vis_robot_.ClearImage();
}
}
@@ -357,23 +359,24 @@
for (const auto *target_pose_fbs : *map.target_poses()) {
// Skip detections with invalid ids
if (static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) <
- FLAGS_min_target_id ||
+ absl::GetFlag(FLAGS_min_target_id) ||
static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) >
- FLAGS_max_target_id) {
+ absl::GetFlag(FLAGS_max_target_id)) {
VLOG(1) << "Skipping tag from " << camera_name << " with invalid id of "
<< target_pose_fbs->id();
continue;
}
// Skip detections with high pose errors
- if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
+ if (target_pose_fbs->pose_error() > absl::GetFlag(FLAGS_max_pose_error)) {
LOG(INFO) << "Skipping tag from " << camera_name << " with id "
<< target_pose_fbs->id() << " due to pose error of "
<< target_pose_fbs->pose_error();
continue;
}
// Skip detections with high pose error ratios
- if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
+ if (target_pose_fbs->pose_error_ratio() >
+ absl::GetFlag(FLAGS_max_pose_error_ratio)) {
LOG(INFO) << "Skipping tag from " << camera_name << " with id "
<< target_pose_fbs->id() << " due to pose error ratio of "
<< target_pose_fbs->pose_error_ratio();
@@ -437,9 +440,10 @@
vis_robot_.SetDefaultViewpoint(kImageWidth, kFocalLength);
std::optional<aos::FlatbufferDetachedBuffer<aos::Configuration>> config =
- (FLAGS_config.empty()
+ (absl::GetFlag(FLAGS_config).empty()
? std::nullopt
- : std::make_optional(aos::configuration::ReadConfig(FLAGS_config)));
+ : std::make_optional(
+ aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config))));
// open logfiles
aos::logger::LogReader reader(
@@ -448,15 +452,16 @@
reader.RemapLoggedChannel("/imu/constants", "y2024.Constants");
reader.RemapLoggedChannel("/orin1/constants", "y2024.Constants");
- if (FLAGS_robot) {
+ if (absl::GetFlag(FLAGS_robot)) {
reader.RemapLoggedChannel("/roborio/constants", "y2024.Constants");
}
reader.Register();
- y2024::SendSimulationConstants(reader.event_loop_factory(), FLAGS_team_number,
- FLAGS_constants_path);
+ y2024::SendSimulationConstants(reader.event_loop_factory(),
+ absl::GetFlag(FLAGS_team_number),
+ absl::GetFlag(FLAGS_constants_path));
- VLOG(1) << "Using target type " << FLAGS_target_type;
+ VLOG(1) << "Using target type " << absl::GetFlag(FLAGS_target_type);
std::vector<const calibration::CameraCalibration *> calibration_list;
@@ -591,7 +596,7 @@
Eigen::Affine3d H_world_board;
H_world_board = Eigen::Translation3d::Identity() *
Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
- if (FLAGS_alt_view) {
+ if (absl::GetFlag(FLAGS_alt_view)) {
H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
}
@@ -675,9 +680,9 @@
CameraNode &camera_node = node_list[i + 1];
const std::string calibration_filename =
frc971::vision::CalibrationFilename(
- FLAGS_output_folder, camera_node.node_name, FLAGS_team_number,
- camera_node.camera_number, cal_copy.message().camera_id()->data(),
- time_ss.str());
+ absl::GetFlag(FLAGS_output_folder), camera_node.node_name,
+ absl::GetFlag(FLAGS_team_number), camera_node.camera_number,
+ cal_copy.message().camera_id()->data(), time_ss.str());
LOG(INFO) << calibration_filename << " -> "
<< aos::FlatbufferToJson(merged_calibration,
{.multi_line = true});
@@ -686,7 +691,7 @@
calibration_filename,
aos::FlatbufferToJson(merged_calibration, {.multi_line = true}));
- if (FLAGS_visualize) {
+ if (absl::GetFlag(FLAGS_visualize)) {
// Draw each of the updated extrinsic camera locations
vis_robot_.SetDefaultViewpoint(1000.0, 1500.0);
vis_robot_.DrawFrameAxes(
@@ -695,7 +700,7 @@
}
}
}
- if (FLAGS_visualize) {
+ if (absl::GetFlag(FLAGS_visualize)) {
// And don't forget to draw the base camera location
vis_robot_.DrawFrameAxes(updated_extrinsics[0],
node_list.at(0).camera_name(),
diff --git a/y2024/vision/target_mapping.cc b/y2024/vision/target_mapping.cc
index 5681580..f5fa34c 100644
--- a/y2024/vision/target_mapping.cc
+++ b/y2024/vision/target_mapping.cc
@@ -1,6 +1,7 @@
#include <string>
#include "Eigen/Dense"
+#include "absl/flags/flag.h"
#include "opencv2/aruco.hpp"
#include "opencv2/calib3d.hpp"
#include "opencv2/core/eigen.hpp"
@@ -25,46 +26,45 @@
#include "y2024/constants/simulated_constants_sender.h"
#include "y2024/vision/vision_util.h"
-DEFINE_string(config, "",
- "If set, override the log's config file with this one.");
-DEFINE_string(constants_path, "y2024/constants/constants.json",
- "Path to the constant file");
-DEFINE_string(dump_constraints_to, "/tmp/mapping_constraints.txt",
- "Write the target constraints to this path");
-DEFINE_string(dump_stats_to, "/tmp/mapping_stats.txt",
- "Write the mapping stats to this path");
-DEFINE_string(field_name, "crescendo",
- "Field name, for the output json filename and flatbuffer field");
-DEFINE_string(json_path, "y2024/vision/maps/target_map.json",
- "Specify path for json with initial pose guesses.");
-DEFINE_double(max_pose_error, 1e-6,
- "Throw out target poses with a higher pose error than this");
-DEFINE_double(
- max_pose_error_ratio, 0.4,
- "Throw out target poses with a higher pose error ratio than this");
-DEFINE_string(mcap_output_path, "", "Log to output.");
-DEFINE_string(output_dir, "y2024/vision/maps",
- "Directory to write solved target map to");
-DEFINE_double(pause_on_distance, 2.0,
- "Pause if two consecutive implied robot positions differ by more "
- "than this many meters");
-DEFINE_string(orin, "orin1",
- "Orin name to generate mcap log for; defaults to orin1.");
-DEFINE_uint64(skip_to, 1,
- "Start at combined image of this number (1 is the first image)");
-DEFINE_bool(solve, true, "Whether to solve for the field's target map.");
-DEFINE_bool(split_field, false,
- "Whether to break solve into two sides of field");
-DEFINE_int32(team_number, 0,
- "Required: Use the calibration for a node with this team number");
-DEFINE_uint64(wait_key, 1,
- "Time in ms to wait between images, if no click (0 to wait "
- "indefinitely until click).");
+ABSL_FLAG(std::string, config, "",
+ "If set, override the log's config file with this one.");
+ABSL_FLAG(std::string, constants_path, "y2024/constants/constants.json",
+ "Path to the constant file");
+ABSL_FLAG(std::string, dump_constraints_to, "/tmp/mapping_constraints.txt",
+ "Write the target constraints to this path");
+ABSL_FLAG(std::string, dump_stats_to, "/tmp/mapping_stats.txt",
+ "Write the mapping stats to this path");
+ABSL_FLAG(std::string, field_name, "crescendo",
+ "Field name, for the output json filename and flatbuffer field");
+ABSL_FLAG(std::string, json_path, "y2024/vision/maps/target_map.json",
+ "Specify path for json with initial pose guesses.");
+ABSL_FLAG(double, max_pose_error, 1e-6,
+ "Throw out target poses with a higher pose error than this");
+ABSL_FLAG(double, max_pose_error_ratio, 0.4,
+ "Throw out target poses with a higher pose error ratio than this");
+ABSL_FLAG(std::string, mcap_output_path, "", "Log to output.");
+ABSL_FLAG(std::string, output_dir, "y2024/vision/maps",
+ "Directory to write solved target map to");
+ABSL_FLAG(double, pause_on_distance, 2.0,
+ "Pause if two consecutive implied robot positions differ by more "
+ "than this many meters");
+ABSL_FLAG(std::string, orin, "orin1",
+ "Orin name to generate mcap log for; defaults to orin1.");
+ABSL_FLAG(uint64_t, skip_to, 1,
+ "Start at combined image of this number (1 is the first image)");
+ABSL_FLAG(bool, solve, true, "Whether to solve for the field's target map.");
+ABSL_FLAG(bool, split_field, false,
+ "Whether to break solve into two sides of field");
+ABSL_FLAG(int32_t, team_number, 0,
+ "Required: Use the calibration for a node with this team number");
+ABSL_FLAG(uint64_t, wait_key, 1,
+ "Time in ms to wait between images, if no click (0 to wait "
+ "indefinitely until click).");
-DECLARE_int32(frozen_target_id);
-DECLARE_int32(min_target_id);
-DECLARE_int32(max_target_id);
-DECLARE_bool(visualize_solver);
+ABSL_DECLARE_FLAG(int32_t, frozen_target_id);
+ABSL_DECLARE_FLAG(int32_t, min_target_id);
+ABSL_DECLARE_FLAG(int32_t, max_target_id);
+ABSL_DECLARE_FLAG(bool, visualize_solver);
namespace y2024::vision {
using frc971::vision::DataAdapter;
@@ -148,8 +148,8 @@
{9, "blue"}, {10, "blue"}, {11, "red"}, {12, "red"},
{13, "red"}, {14, "blue"}, {15, "blue"}, {16, "blue"}};
-const auto TargetMapperReplay::kFixedTargetMapper =
- TargetMapper(FLAGS_json_path, ceres::examples::VectorOfConstraints{});
+const auto TargetMapperReplay::kFixedTargetMapper = TargetMapper(
+ absl::GetFlag(FLAGS_json_path), ceres::examples::VectorOfConstraints{});
Eigen::Affine3d TargetMapperReplay::CameraToRobotDetection(
Eigen::Affine3d H_camera_target, Eigen::Affine3d extrinsics) {
@@ -173,10 +173,11 @@
reader_->MaybeRemapLoggedChannel<Constants>("/roborio/constants");
reader_->Register();
- SendSimulationConstants(reader_->event_loop_factory(), FLAGS_team_number,
- FLAGS_constants_path);
+ SendSimulationConstants(reader_->event_loop_factory(),
+ absl::GetFlag(FLAGS_team_number),
+ absl::GetFlag(FLAGS_constants_path));
- if (FLAGS_visualize_solver) {
+ if (absl::GetFlag(FLAGS_visualize_solver)) {
vis_robot_.ClearImage();
// Set focal length to zoomed in, to view extrinsics
const double kFocalLength = 1500.0;
@@ -197,7 +198,7 @@
mapping_event_loops_[mapping_event_loops_.size() - 1].get(),
&constants_fetcher, camera_node.camera_number);
- if (FLAGS_visualize_solver) {
+ if (absl::GetFlag(FLAGS_visualize_solver)) {
// Show the extrinsics calibration to start, for reference to confirm
const auto *calibration = FindCameraCalibration(
constants_fetcher.constants(),
@@ -214,7 +215,7 @@
}
}
- if (FLAGS_visualize_solver) {
+ if (absl::GetFlag(FLAGS_visualize_solver)) {
cv::imshow("Extrinsics", vis_robot_.image_);
cv::waitKey(0);
vis_robot_.ClearImage();
@@ -242,21 +243,22 @@
for (const auto *target_pose_fbs : *map.target_poses()) {
// Skip detections with invalid ids
if (static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) <
- FLAGS_min_target_id ||
+ absl::GetFlag(FLAGS_min_target_id) ||
static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) >
- FLAGS_max_target_id) {
+ absl::GetFlag(FLAGS_max_target_id)) {
VLOG(1) << "Skipping tag with invalid id of " << target_pose_fbs->id();
continue;
}
// Skip detections with high pose errors
- if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
+ if (target_pose_fbs->pose_error() > absl::GetFlag(FLAGS_max_pose_error)) {
VLOG(1) << "Skipping tag " << target_pose_fbs->id()
<< " due to pose error of " << target_pose_fbs->pose_error();
continue;
}
// Skip detections with high pose error ratios
- if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
+ if (target_pose_fbs->pose_error_ratio() >
+ absl::GetFlag(FLAGS_max_pose_error_ratio)) {
VLOG(1) << "Skipping tag " << target_pose_fbs->id()
<< " due to pose error ratio of "
<< target_pose_fbs->pose_error_ratio();
@@ -296,7 +298,7 @@
.distortion_factor = distortion_factor,
.id = static_cast<TargetMapper::TargetId>(target_pose.id)});
- if (FLAGS_visualize_solver) {
+ if (absl::GetFlag(FLAGS_visualize_solver)) {
// If we've already drawn this camera_name in the current image,
// display the image before clearing and adding the new poses
if (drawn_cameras_.count(camera_name) != 0) {
@@ -306,20 +308,21 @@
cv::Point(600, 10), cv::FONT_HERSHEY_PLAIN, 1.0,
cv::Scalar(255, 255, 255));
- if (display_count_ >= FLAGS_skip_to) {
+ if (display_count_ >= absl::GetFlag(FLAGS_skip_to)) {
VLOG(1) << "Showing image for camera " << camera_name
<< " since we've drawn it already";
cv::imshow("View", vis_robot_.image_);
// Pause if delta_T is too large, but only after first image (to make
// sure the delta's are correct)
- if (max_delta_T_world_robot_ > FLAGS_pause_on_distance &&
+ if (max_delta_T_world_robot_ >
+ absl::GetFlag(FLAGS_pause_on_distance) &&
display_count_ > 1) {
LOG(INFO) << "Pausing since the delta between robot estimates is "
<< max_delta_T_world_robot_ << " which is > threshold of "
- << FLAGS_pause_on_distance;
+ << absl::GetFlag(FLAGS_pause_on_distance);
cv::waitKey(0);
} else {
- cv::waitKey(FLAGS_wait_key);
+ cv::waitKey(absl::GetFlag(FLAGS_wait_key));
}
max_delta_T_world_robot_ = 0.0;
} else {
@@ -341,7 +344,8 @@
label << "id " << target_pose_fbs->id()
<< ": err (% of max): " << target_pose_fbs->pose_error() << " ("
- << (target_pose_fbs->pose_error() / FLAGS_max_pose_error)
+ << (target_pose_fbs->pose_error() /
+ absl::GetFlag(FLAGS_max_pose_error))
<< ") err_ratio: " << target_pose_fbs->pose_error_ratio() << " ";
vis_robot_.DrawRobotOutline(H_world_robot, camera_name,
@@ -363,7 +367,7 @@
last_H_world_robot_ = H_world_robot;
}
}
- if (FLAGS_visualize_solver) {
+ if (absl::GetFlag(FLAGS_visualize_solver)) {
if (drew) {
// Collect all the labels from a given camera, and add the text
// TODO: Need to fix this one
@@ -374,7 +378,7 @@
drawn_cameras_.emplace(camera_name);
} else if (node_distributed_time - last_draw_time_ >
std::chrono::milliseconds(30) &&
- display_count_ >= FLAGS_skip_to && drew) {
+ display_count_ >= absl::GetFlag(FLAGS_skip_to) && drew) {
// TODO: Check on 30ms value-- does this make sense?
double delta_t = (node_distributed_time - last_draw_time_).count() / 1e6;
VLOG(1) << "Last result was " << delta_t << "ms ago";
@@ -384,7 +388,7 @@
// Display and clear the image if we haven't draw in a while
VLOG(1) << "Displaying image due to time lapse";
cv::imshow("View", vis_robot_.image_);
- cv::waitKey(FLAGS_wait_key);
+ cv::waitKey(absl::GetFlag(FLAGS_wait_key));
max_delta_T_world_robot_ = 0.0;
drawn_cameras_.clear();
}
@@ -422,11 +426,11 @@
}
void TargetMapperReplay::MaybeSolve() {
- if (FLAGS_solve) {
+ if (absl::GetFlag(FLAGS_solve)) {
auto target_constraints =
DataAdapter::MatchTargetDetections(timestamped_target_detections_);
- if (FLAGS_split_field) {
+ if (absl::GetFlag(FLAGS_split_field)) {
// Remove constraints between the two sides of the field - these are
// basically garbage because of how far the camera is. We will use seeding
// below to connect the two sides
@@ -443,14 +447,15 @@
LOG(INFO) << "Solving for locations of tags with "
<< target_constraints.size() << " constraints";
- TargetMapper mapper(FLAGS_json_path, target_constraints);
- mapper.Solve(FLAGS_field_name, FLAGS_output_dir);
+ TargetMapper mapper(absl::GetFlag(FLAGS_json_path), target_constraints);
+ mapper.Solve(absl::GetFlag(FLAGS_field_name),
+ absl::GetFlag(FLAGS_output_dir));
- if (!FLAGS_dump_constraints_to.empty()) {
- mapper.DumpConstraints(FLAGS_dump_constraints_to);
+ if (!absl::GetFlag(FLAGS_dump_constraints_to).empty()) {
+ mapper.DumpConstraints(absl::GetFlag(FLAGS_dump_constraints_to));
}
- if (!FLAGS_dump_stats_to.empty()) {
- mapper.DumpStats(FLAGS_dump_stats_to);
+ if (!absl::GetFlag(FLAGS_dump_stats_to).empty()) {
+ mapper.DumpStats(absl::GetFlag(FLAGS_dump_stats_to));
}
mapper.PrintDiffs();
}
@@ -460,9 +465,10 @@
std::vector<DataAdapter::TimestampedDetection> timestamped_target_detections;
std::optional<aos::FlatbufferDetachedBuffer<aos::Configuration>> config =
- (FLAGS_config.empty()
+ (absl::GetFlag(FLAGS_config).empty()
? std::nullopt
- : std::make_optional(aos::configuration::ReadConfig(FLAGS_config)));
+ : std::make_optional(
+ aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config))));
// Open logfiles
aos::logger::LogReader reader(
diff --git a/y2024/vision/viewer.cc b/y2024/vision/viewer.cc
index d5ada14..c72c08e 100644
--- a/y2024/vision/viewer.cc
+++ b/y2024/vision/viewer.cc
@@ -12,12 +12,13 @@
#include "frc971/vision/vision_util_lib.h"
#include "y2024/vision/vision_util.h"
-DEFINE_string(capture, "",
- "If set, capture a single image and save it to this filename.");
-DEFINE_string(channel, "/camera", "Channel name for the image.");
-DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
-DEFINE_int32(rate, 100, "Time in milliseconds to wait between images");
-DEFINE_double(scale, 1.0, "Scale factor for images being displayed");
+ABSL_FLAG(std::string, capture, "",
+ "If set, capture a single image and save it to this filename.");
+ABSL_FLAG(std::string, channel, "/camera", "Channel name for the image.");
+ABSL_FLAG(std::string, config, "aos_config.json",
+ "Path to the config file to use.");
+ABSL_FLAG(int32_t, rate, 100, "Time in milliseconds to wait between images");
+ABSL_FLAG(double, scale, 1.0, "Scale factor for images being displayed");
namespace y2024::vision {
namespace {
@@ -42,12 +43,12 @@
cv::Mat bgr_image(cv::Size(image->cols(), image->rows()), CV_8UC3);
cv::cvtColor(image_color_mat, bgr_image, cv::COLOR_YUV2BGR_YUYV);
- if (!FLAGS_capture.empty()) {
- if (absl::EndsWith(FLAGS_capture, ".bfbs")) {
- aos::WriteFlatbufferToFile(FLAGS_capture,
+ if (!absl::GetFlag(FLAGS_capture).empty()) {
+ if (absl::EndsWith(absl::GetFlag(FLAGS_capture), ".bfbs")) {
+ aos::WriteFlatbufferToFile(absl::GetFlag(FLAGS_capture),
image_fetcher->CopyFlatBuffer());
} else {
- cv::imwrite(FLAGS_capture, bgr_image);
+ cv::imwrite(absl::GetFlag(FLAGS_capture), bgr_image);
}
return false;
@@ -55,9 +56,9 @@
cv::Mat undistorted_image;
cv::undistort(bgr_image, undistorted_image, intrinsics, dist_coeffs);
- if (FLAGS_scale != 1.0) {
- cv::resize(undistorted_image, undistorted_image, cv::Size(), FLAGS_scale,
- FLAGS_scale);
+ if (absl::GetFlag(FLAGS_scale) != 1.0) {
+ cv::resize(undistorted_image, undistorted_image, cv::Size(),
+ absl::GetFlag(FLAGS_scale), absl::GetFlag(FLAGS_scale));
}
cv::imshow("Display", undistorted_image);
@@ -77,7 +78,7 @@
void ViewerMain() {
aos::FlatbufferDetachedBuffer<aos::Configuration> config =
- aos::configuration::ReadConfig(FLAGS_config);
+ aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config));
frc971::constants::WaitForConstants<y2024::Constants>(&config.message());
@@ -85,8 +86,8 @@
frc971::constants::ConstantsFetcher<y2024::Constants> constants_fetcher(
&event_loop);
- CHECK(FLAGS_channel.length() == 8);
- int camera_id = std::stoi(FLAGS_channel.substr(7, 1));
+ CHECK(absl::GetFlag(FLAGS_channel).length() == 8);
+ int camera_id = std::stoi(absl::GetFlag(FLAGS_channel).substr(7, 1));
const auto *calibration_data = FindCameraCalibration(
constants_fetcher.constants(), event_loop.node()->name()->string_view(),
camera_id);
@@ -95,7 +96,7 @@
frc971::vision::CameraDistCoeffs(calibration_data);
aos::Fetcher<CameraImage> image_fetcher =
- event_loop.MakeFetcher<CameraImage>(FLAGS_channel);
+ event_loop.MakeFetcher<CameraImage>(absl::GetFlag(FLAGS_channel));
// Run the display loop
event_loop.AddPhasedLoop(
@@ -105,7 +106,7 @@
event_loop.Exit();
};
},
- ::std::chrono::milliseconds(FLAGS_rate));
+ ::std::chrono::milliseconds(absl::GetFlag(FLAGS_rate)));
event_loop.Run();
diff --git a/y2024/vision/vision_util.cc b/y2024/vision/vision_util.cc
index becb1cf..034ecdb 100644
--- a/y2024/vision/vision_util.cc
+++ b/y2024/vision/vision_util.cc
@@ -1,6 +1,7 @@
#include "y2024/vision/vision_util.h"
-#include "glog/logging.h"
+#include "absl/log/check.h"
+#include "absl/log/log.h"
namespace y2024::vision {