Move over to ABSL logging and flags.
Remove gperftools too, since it wants gflags.
Here come the fireworks.
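
For reference, the mechanical pattern applied throughout this change, shown as a minimal sketch (not taken verbatim from any one file; the flag name mirrors y2020/vision/camera_reader_main.cc):

    #include <string>

    #include "absl/flags/flag.h"
    #include "absl/log/log.h"

    // gflags' DEFINE_string(config, ...) becomes an ABSL_FLAG declaration.
    ABSL_FLAG(std::string, config, "aos_config.json",
              "Path to the config file to use.");

    void Example() {
      // FLAGS_config is no longer a plain global; read it through absl::GetFlag().
      const std::string config_path = absl::GetFlag(FLAGS_config);
      LOG(INFO) << "Using config " << config_path;
    }

Logging call sites are unchanged; only the includes move from "glog/logging.h" to
"absl/log/log.h" and "absl/log/check.h", and BUILD deps switch from
@com_github_google_glog//:glog to @com_google_absl//absl/log and
@com_google_absl//absl/log:check.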
Change-Id: I79cb7bcf60f1047fbfa28bfffc21a0fd692e4b1c
Signed-off-by: Austin Schuh <austin.linux@gmail.com>
diff --git a/y2020/vision/camera_reader.cc b/y2020/vision/camera_reader.cc
index 251f988..37d35e6 100644
--- a/y2020/vision/camera_reader.cc
+++ b/y2020/vision/camera_reader.cc
@@ -2,6 +2,7 @@
#include <math.h>
+#include "absl/flags/flag.h"
#include <opencv2/calib3d.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/imgproc.hpp>
@@ -16,12 +17,12 @@
#include "y2020/vision/sift/sift_training_generated.h"
#include "y2020/vision/tools/python_code/sift_training_data.h"
-DEFINE_bool(skip_sift, false,
- "If true don't run any feature extraction. Just forward images.");
-DEFINE_bool(ransac_pose, false,
- "If true, do pose estimate with RANSAC; else, use ITERATIVE mode.");
-DEFINE_bool(use_prev_pose, true,
- "If true, use previous pose estimate as seed for next estimate.");
+ABSL_FLAG(bool, skip_sift, false,
+ "If true don't run any feature extraction. Just forward images.");
+ABSL_FLAG(bool, ransac_pose, false,
+ "If true, do pose estimate with RANSAC; else, use ITERATIVE mode.");
+ABSL_FLAG(bool, use_prev_pose, true,
+ "If true, use previous pose estimate as seed for next estimate.");
namespace frc971::vision {
@@ -160,7 +161,7 @@
std::vector<cv::KeyPoint> keypoints;
cv::Mat descriptors;
- if (!FLAGS_skip_sift) {
+ if (!absl::GetFlag(FLAGS_skip_sift)) {
sift_->detectAndCompute(image_mat, cv::noArray(), keypoints, descriptors);
}
@@ -176,7 +177,7 @@
for (int image_idx = 0; image_idx < number_training_images(); ++image_idx) {
// Then, match those features against our training data.
std::vector<std::vector<cv::DMatch>> matches;
- if (!FLAGS_skip_sift) {
+ if (!absl::GetFlag(FLAGS_skip_sift)) {
matchers_[image_idx].knnMatch(/* queryDescriptors */ descriptors, matches,
/* k */ 2);
}
@@ -318,7 +319,7 @@
// it's sometimes bouncing between two possible poses. Putting it
// near the previous pose helps it converge to the previous pose
// estimate (assuming it's valid).
- if (FLAGS_use_prev_pose) {
+ if (absl::GetFlag(FLAGS_use_prev_pose)) {
R_camera_field_vec = prev_camera_field_R_vec_list_[i].clone();
T_camera_field = prev_camera_field_T_list_[i].clone();
VLOG(2) << "Using previous match for training image " << i
@@ -326,13 +327,13 @@
}
// Compute the pose of the camera (global origin relative to camera)
- if (FLAGS_ransac_pose) {
+ if (absl::GetFlag(FLAGS_ransac_pose)) {
// RANSAC computation is designed to be more robust to outliers.
// But, we found it bounces around a lot, even with identical points
cv::solvePnPRansac(per_image_good_match.training_points_3d,
per_image_good_match.query_points, CameraIntrinsics(),
CameraDistCoeffs(), R_camera_field_vec, T_camera_field,
- FLAGS_use_prev_pose);
+ absl::GetFlag(FLAGS_use_prev_pose));
} else {
// ITERATIVE mode is potentially less robust to outliers, but we
// found it to be more stable
@@ -340,7 +341,7 @@
cv::solvePnP(per_image_good_match.training_points_3d,
per_image_good_match.query_points, CameraIntrinsics(),
CameraDistCoeffs(), R_camera_field_vec, T_camera_field,
- FLAGS_use_prev_pose, cv::SOLVEPNP_ITERATIVE);
+ absl::GetFlag(FLAGS_use_prev_pose), cv::SOLVEPNP_ITERATIVE);
}
// We are occasionally seeing NaN in the prior estimate, so checking for
@@ -451,7 +452,7 @@
void CameraReader::ReadImage() {
if (!reader_->ReadLatestImage()) {
- if (!FLAGS_skip_sift) {
+ if (!absl::GetFlag(FLAGS_skip_sift)) {
LOG(INFO) << "No image, sleeping";
}
read_image_timer_->Schedule(event_loop_->monotonic_now() +
diff --git a/y2020/vision/camera_reader_main.cc b/y2020/vision/camera_reader_main.cc
index 1995151..90a2580 100644
--- a/y2020/vision/camera_reader_main.cc
+++ b/y2020/vision/camera_reader_main.cc
@@ -1,3 +1,6 @@
+#include "absl/flags/flag.h"
+#include "absl/log/check.h"
+
#include "aos/events/shm_event_loop.h"
#include "aos/init.h"
#include "y2020/vision/camera_reader.h"
@@ -5,14 +8,15 @@
// config used to allow running camera_reader independently. E.g.,
// bazel run //y2020/vision:camera_reader -- --config y2020/aos_config.json
// --override_hostname pi-7971-1 --ignore_timestamps true
-DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
+ABSL_FLAG(std::string, config, "aos_config.json",
+ "Path to the config file to use.");
namespace frc971::vision {
namespace {
void CameraReaderMain() {
aos::FlatbufferDetachedBuffer<aos::Configuration> config =
- aos::configuration::ReadConfig(FLAGS_config);
+ aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config));
const aos::FlatbufferSpan<sift::TrainingData> training_data(
SiftTrainingData());
diff --git a/y2020/vision/extrinsics_calibration.cc b/y2020/vision/extrinsics_calibration.cc
index ddbe9af..9f50373 100644
--- a/y2020/vision/extrinsics_calibration.cc
+++ b/y2020/vision/extrinsics_calibration.cc
@@ -2,6 +2,7 @@
#include "Eigen/Dense"
#include "Eigen/Geometry"
+#include "absl/flags/flag.h"
#include "absl/strings/str_format.h"
#include "aos/events/logging/log_reader.h"
@@ -15,11 +16,11 @@
#include "frc971/wpilib/imu_batch_generated.h"
#include "y2020/control_loops/superstructure/superstructure_status_generated.h"
-DEFINE_string(pi, "pi-7971-2", "Pi name to calibrate.");
-DEFINE_bool(plot, false, "Whether to plot the resulting data.");
-DEFINE_bool(turret, false, "If true, the camera is on the turret");
-DEFINE_string(base_intrinsics, "",
- "Intrinsics to use for extrinsics calibration.");
+ABSL_FLAG(std::string, pi, "pi-7971-2", "Pi name to calibrate.");
+ABSL_FLAG(bool, plot, false, "Whether to plot the resulting data.");
+ABSL_FLAG(bool, turret, false, "If true, the camera is on the turret");
+ABSL_FLAG(std::string, base_intrinsics, "",
+ "Intrinsics to use for extrinsics calibration.");
namespace frc971::vision {
namespace chrono = std::chrono;
@@ -46,7 +47,7 @@
aos::configuration::GetNode(factory.configuration(), "roborio");
std::optional<uint16_t> pi_number =
- aos::network::ParsePiOrOrinNumber(FLAGS_pi);
+ aos::network::ParsePiOrOrinNumber(absl::GetFlag(FLAGS_pi));
CHECK(pi_number);
LOG(INFO) << "Pi " << *pi_number;
const aos::Node *const pi_node = aos::configuration::GetNode(
@@ -65,13 +66,13 @@
aos::FlatbufferDetachedBuffer<calibration::CameraCalibration> intrinsics =
aos::JsonFileToFlatbuffer<calibration::CameraCalibration>(
- FLAGS_base_intrinsics);
+ absl::GetFlag(FLAGS_base_intrinsics));
// Now, hook Calibration up to everything.
Calibration extractor(&factory, pi_event_loop.get(), imu_event_loop.get(),
- FLAGS_pi, &intrinsics.message(), TargetType::kCharuco,
- "/camera", &data);
+ absl::GetFlag(FLAGS_pi), &intrinsics.message(),
+ TargetType::kCharuco, "/camera", &data);
- if (FLAGS_turret) {
+ if (absl::GetFlag(FLAGS_turret)) {
aos::NodeEventLoopFactory *roborio_factory =
factory.GetNodeEventLoopFactory(roborio_node->name()->string_view());
roborio_event_loop->MakeWatcher(
@@ -141,7 +142,7 @@
nominal_board_to_world.inverse())
.transpose();
- if (FLAGS_plot) {
+ if (absl::GetFlag(FLAGS_plot)) {
Plot(data, calibration_parameters);
}
}
diff --git a/y2020/vision/sift/BUILD b/y2020/vision/sift/BUILD
index a220664..96afd01 100644
--- a/y2020/vision/sift/BUILD
+++ b/y2020/vision/sift/BUILD
@@ -21,7 +21,8 @@
":get_gaussian_kernel",
"//aos/testing:googletest",
"//third_party:opencv",
- "@com_github_google_glog//:glog",
+ "@com_google_absl//absl/log",
+ "@com_google_absl//absl/log:check",
],
)
@@ -158,7 +159,8 @@
deps = [
":fast_gaussian",
"//third_party:opencv",
- "@com_github_google_glog//:glog",
+ "@com_google_absl//absl/log",
+ "@com_google_absl//absl/log:check",
],
)
@@ -175,7 +177,8 @@
":fast_gaussian_all",
"//third_party:halide_runtime",
"//third_party:opencv",
- "@com_github_google_glog//:glog",
+ "@com_google_absl//absl/log",
+ "@com_google_absl//absl/log:check",
],
)
@@ -204,7 +207,8 @@
"//aos/time",
"//third_party:opencv",
"//y2020/vision/sift:sift971",
- "@com_github_google_glog//:glog",
+ "@com_google_absl//absl/log",
+ "@com_google_absl//absl/log:check",
],
)
diff --git a/y2020/vision/sift/fast_gaussian.h b/y2020/vision/sift/fast_gaussian.h
index d9a5f07..4e246e9 100644
--- a/y2020/vision/sift/fast_gaussian.h
+++ b/y2020/vision/sift/fast_gaussian.h
@@ -3,7 +3,8 @@
#include <type_traits>
-#include "glog/logging.h"
+#include "absl/log/check.h"
+#include "absl/log/log.h"
#include <opencv2/core/mat.hpp>
#include "HalideBuffer.h"
diff --git a/y2020/vision/sift/get_gaussian_kernel_test.cc b/y2020/vision/sift/get_gaussian_kernel_test.cc
index 3d2674f..854db64 100644
--- a/y2020/vision/sift/get_gaussian_kernel_test.cc
+++ b/y2020/vision/sift/get_gaussian_kernel_test.cc
@@ -2,7 +2,8 @@
#include <tuple>
-#include "glog/logging.h"
+#include "absl/log/check.h"
+#include "absl/log/log.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include <opencv2/core/mat.hpp>
diff --git a/y2020/vision/sift/sift971.cc b/y2020/vision/sift/sift971.cc
index 21f9a97..1df8d1c 100644
--- a/y2020/vision/sift/sift971.cc
+++ b/y2020/vision/sift/sift971.cc
@@ -110,7 +110,8 @@
#include <iostream>
#include <mutex>
-#include "glog/logging.h"
+#include "absl/log/check.h"
+#include "absl/log/log.h"
#include <opencv2/core/hal/hal.hpp>
#include <opencv2/imgproc.hpp>
diff --git a/y2020/vision/sift/testing_sift.cc b/y2020/vision/sift/testing_sift.cc
index c0d4ca2..73b5d08 100644
--- a/y2020/vision/sift/testing_sift.cc
+++ b/y2020/vision/sift/testing_sift.cc
@@ -1,6 +1,8 @@
#include <memory>
-#include "glog/logging.h"
+#include "absl/flags/flag.h"
+#include "absl/log/check.h"
+#include "absl/log/log.h"
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc.hpp>
@@ -10,15 +12,16 @@
#include "y2020/vision/sift/fast_gaussian.h"
#include "y2020/vision/sift/sift971.h"
-DEFINE_string(image, "", "Image to test with");
+ABSL_FLAG(std::string, image, "", "Image to test with");
int main(int argc, char **argv) {
aos::InitGoogle(&argc, &argv);
cv::setNumThreads(4);
- const cv::Mat raw_image = cv::imread(FLAGS_image);
- CHECK(!raw_image.empty()) << ": Failed to read: " << FLAGS_image;
+ const cv::Mat raw_image = cv::imread(absl::GetFlag(FLAGS_image));
+ CHECK(!raw_image.empty())
+ << ": Failed to read: " << absl::GetFlag(FLAGS_image);
CHECK_EQ(CV_8UC3, raw_image.type());
#if 0
cv::Mat color_image;
diff --git a/y2020/vision/tools/python_code/camera_param_test.cc b/y2020/vision/tools/python_code/camera_param_test.cc
index f47c327..68d7b4d 100644
--- a/y2020/vision/tools/python_code/camera_param_test.cc
+++ b/y2020/vision/tools/python_code/camera_param_test.cc
@@ -1,6 +1,7 @@
#include <unistd.h>
-#include "glog/logging.h"
+#include "absl/log/check.h"
+#include "absl/log/log.h"
#include "gtest/gtest.h"
#include <opencv2/features2d.hpp>
#include <opencv2/imgproc.hpp>
diff --git a/y2020/vision/viewer.cc b/y2020/vision/viewer.cc
index 8ba2955..c44490e 100644
--- a/y2020/vision/viewer.cc
+++ b/y2020/vision/viewer.cc
@@ -1,6 +1,7 @@
#include <map>
#include <random>
+#include "absl/flags/flag.h"
#include <opencv2/calib3d.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
@@ -13,12 +14,13 @@
#include "frc971/vision/vision_generated.h"
#include "y2020/vision/sift/sift_generated.h"
-DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
-DEFINE_bool(show_features, true, "Show the SIFT features that matched.");
-DEFINE_string(channel, "/camera", "Channel name for the image.");
+ABSL_FLAG(std::string, config, "aos_config.json",
+ "Path to the config file to use.");
+ABSL_FLAG(bool, show_features, true, "Show the SIFT features that matched.");
+ABSL_FLAG(std::string, channel, "/camera", "Channel name for the image.");
-DEFINE_string(capture, "",
- "If set, capture a single image and save it to this filename.");
+ABSL_FLAG(std::string, capture, "",
+ "If set, capture a single image and save it to this filename.");
namespace frc971::vision {
namespace {
@@ -72,8 +74,8 @@
cv::Mat rgb_image(cv::Size(image->cols(), image->rows()), CV_8UC3);
cv::cvtColor(image_color_mat, rgb_image, cv::COLOR_YUV2BGR_YUYV);
- if (!FLAGS_capture.empty()) {
- cv::imwrite(FLAGS_capture, rgb_image);
+ if (!absl::GetFlag(FLAGS_capture).empty()) {
+ cv::imwrite(absl::GetFlag(FLAGS_capture), rgb_image);
return false;
}
@@ -88,7 +90,8 @@
5);
}
- if (FLAGS_show_features && match->image_matches() != nullptr &&
+ if (absl::GetFlag(FLAGS_show_features) &&
+ match->image_matches() != nullptr &&
match->image_matches()->size() > 0) {
// Iterate through matches and draw matched keypoints
for (uint model_match_ind = 0;
@@ -138,15 +141,16 @@
palette_ = cv::Mat(3, 5, CV_8U, &colors);
aos::FlatbufferDetachedBuffer<aos::Configuration> config =
- aos::configuration::ReadConfig(FLAGS_config);
+ aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config));
aos::ShmEventLoop event_loop(&config.message());
- image_fetcher = event_loop.MakeFetcher<CameraImage>(FLAGS_channel);
+ image_fetcher =
+ event_loop.MakeFetcher<CameraImage>(absl::GetFlag(FLAGS_channel));
// If we want to show the features, we have to use the detailed channel
- std::string result_channel = FLAGS_channel;
- if (FLAGS_show_features) {
+ std::string result_channel = absl::GetFlag(FLAGS_channel);
+ if (absl::GetFlag(FLAGS_show_features)) {
result_channel += "/detailed";
}
match_fetcher =
diff --git a/y2020/vision/viewer_replay.cc b/y2020/vision/viewer_replay.cc
index ee1c8d5..98b1f34 100644
--- a/y2020/vision/viewer_replay.cc
+++ b/y2020/vision/viewer_replay.cc
@@ -1,3 +1,4 @@
+#include "absl/flags/flag.h"
#include <opencv2/calib3d.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
@@ -8,9 +9,9 @@
#include "aos/init.h"
#include "frc971/vision/vision_generated.h"
-DEFINE_string(node, "pi1", "Node name to replay.");
-DEFINE_string(image_save_prefix, "/tmp/img",
- "Prefix to use for saving images from the logfile.");
+ABSL_FLAG(std::string, node, "pi1", "Node name to replay.");
+ABSL_FLAG(std::string, image_save_prefix, "/tmp/img",
+ "Prefix to use for saving images from the logfile.");
namespace frc971::vision {
namespace {
@@ -22,7 +23,8 @@
reader.Register();
const aos::Node *node = nullptr;
if (aos::configuration::MultiNode(reader.configuration())) {
- node = aos::configuration::GetNode(reader.configuration(), FLAGS_node);
+ node = aos::configuration::GetNode(reader.configuration(),
+ absl::GetFlag(FLAGS_node));
}
std::unique_ptr<aos::EventLoop> event_loop =
reader.event_loop_factory()->MakeEventLoop("player", node);
@@ -38,7 +40,7 @@
}
cv::imshow("Display", image_mat);
- if (!FLAGS_image_save_prefix.empty()) {
+ if (!absl::GetFlag(FLAGS_image_save_prefix).empty()) {
cv::imwrite("/tmp/img" + std::to_string(image_count++) + ".png",
image_mat);
}