Merge "Two ball auto rapid react"
diff --git a/scouting/db/db.go b/scouting/db/db.go
index e21ca43..1f182b1 100644
--- a/scouting/db/db.go
+++ b/scouting/db/db.go
@@ -18,6 +18,11 @@
 	R1, R2, R3, B1, B2, B3 int32
 }
 
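+// Shift stores which scouter is assigned to each robot (R1-R3 and B1-B3) for
+// a given match.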
+type Shift struct {
+	MatchNumber                                                      int32
+	R1scouter, R2scouter, R3scouter, B1scouter, B2scouter, B3scouter string
+}
+
 type Stats struct {
 	TeamNumber, MatchNumber, Round int32
 	CompLevel                      string
@@ -90,6 +95,27 @@
 		return nil, errors.New(fmt.Sprint("Failed to create matches table: ", err))
 	}
 
+	statement, err = database.Prepare("CREATE TABLE IF NOT EXISTS shift_schedule (" +
+		"id SERIAL PRIMARY KEY, " +
+		"MatchNumber INTEGER, " +
+		"R1Scouter VARCHAR, " +
+		"R2Scouter VARCHAR, " +
+		"R3Scouter VARCHAR, " +
+		"B1Scouter VARCHAR, " +
+		"B2Scouter VARCHAR, " +
+		"B3Scouter VARCHAR)")
+	if err != nil {
+		database.Close()
+		return nil, errors.New(fmt.Sprint("Failed to prepare shift schedule table creation: ", err))
+	}
+	defer statement.Close()
+
+	_, err = statement.Exec()
+	if err != nil {
+		database.Close()
+		return nil, errors.New(fmt.Sprint("Failed to create shift schedule table: ", err))
+	}
+
 	statement, err = database.Prepare("CREATE TABLE IF NOT EXISTS team_match_stats (" +
 		"TeamNumber INTEGER, " +
 		"MatchNumber INTEGER, " +
@@ -170,6 +196,15 @@
 		return errors.New(fmt.Sprint("Failed to drop matches table: ", err))
 	}
 
+	statement, err = database.Prepare("DROP TABLE IF EXISTS shift_schedule")
+	if err != nil {
+		return errors.New(fmt.Sprint("Failed to prepare dropping shifts table: ", err))
+	}
+	_, err = statement.Exec()
+	if err != nil {
+		return errors.New(fmt.Sprint("Failed to drop shifts table: ", err))
+	}
+
 	statement, err = database.Prepare("DROP TABLE IF EXISTS team_match_stats")
 	if err != nil {
 		return errors.New(fmt.Sprint("Failed to prepare dropping stats table: ", err))
@@ -224,6 +259,26 @@
 	return nil
 }
 
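+// AddToShift adds a single shift schedule entry to the shift_schedule table.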
+func (database *Database) AddToShift(sh Shift) error {
+	statement, err := database.Prepare("INSERT INTO shift_schedule(" +
+		"MatchNumber, " +
+		"R1scouter, R2scouter, R3scouter, B1scouter, B2scouter, B3scouter) " +
+		"VALUES (" +
+		"$1, " +
+		"$2, $3, $4, $5, $6, $7)")
+	if err != nil {
+		return errors.New(fmt.Sprint("Failed to prepare insertion into shift database: ", err))
+	}
+	defer statement.Close()
+
+	_, err = statement.Exec(sh.MatchNumber,
+		sh.R1scouter, sh.R2scouter, sh.R3scouter, sh.B1scouter, sh.B2scouter, sh.B3scouter)
+	if err != nil {
+		return errors.New(fmt.Sprint("Failed to insert into shift database: ", err))
+	}
+	return nil
+}
+
 func (database *Database) AddToStats(s Stats) error {
 	matches, err := database.QueryMatches(s.TeamNumber)
 	if err != nil {
@@ -343,6 +398,27 @@
 	return matches, nil
 }
 
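+// ReturnAllShifts returns every entry in the shift_schedule table.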
+func (database *Database) ReturnAllShifts() ([]Shift, error) {
+	rows, err := database.Query("SELECT * FROM shift_schedule")
+	if err != nil {
+		return nil, errors.New(fmt.Sprint("Failed to select from shift: ", err))
+	}
+	defer rows.Close()
+
+	shifts := make([]Shift, 0)
+	for rows.Next() {
+		var shift Shift
+		var id int
+		err := rows.Scan(&id, &shift.MatchNumber,
+			&shift.R1scouter, &shift.R2scouter, &shift.R3scouter, &shift.B1scouter, &shift.B2scouter, &shift.B3scouter)
+		if err != nil {
+			return nil, errors.New(fmt.Sprint("Failed to scan from shift: ", err))
+		}
+		shifts = append(shifts, shift)
+	}
+	return shifts, nil
+}
+
 func (database *Database) ReturnStats() ([]Stats, error) {
 	rows, err := database.Query("SELECT * FROM team_match_stats")
 	if err != nil {
@@ -414,6 +490,27 @@
 	return matches, nil
 }
 
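+// QueryAllShifts returns the shift schedule entries for a given match.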
+func (database *Database) QueryAllShifts(matchNumber_ int) ([]Shift, error) {
+	rows, err := database.Query("SELECT * FROM shift_schedule WHERE MatchNumber = $1", matchNumber_)
+	if err != nil {
+		return nil, errors.New(fmt.Sprint("Failed to select from shift for match: ", err))
+	}
+	defer rows.Close()
+
+	var shifts []Shift
+	for rows.Next() {
+		var shift Shift
+		var id int
+		err = rows.Scan(&id, &shift.MatchNumber,
+			&shift.R1scouter, &shift.R2scouter, &shift.R3scouter, &shift.B1scouter, &shift.B2scouter, &shift.B3scouter)
+		if err != nil {
+			return nil, errors.New(fmt.Sprint("Failed to scan from shift: ", err))
+		}
+		shifts = append(shifts, shift)
+	}
+	return shifts, nil
+}
+
 func (database *Database) QueryStats(teamNumber_ int) ([]Stats, error) {
 	rows, err := database.Query("SELECT * FROM team_match_stats WHERE TeamNumber = $1", teamNumber_)
 	if err != nil {
diff --git a/scouting/db/db_test.go b/scouting/db/db_test.go
index 391f336..f19b68c 100644
--- a/scouting/db/db_test.go
+++ b/scouting/db/db_test.go
@@ -261,6 +261,41 @@
 	}
 }
 
+func TestQueryShiftDB(t *testing.T) {
+	fixture := createDatabase(t)
+	defer fixture.TearDown()
+
+	testDatabase := []Shift{
+		Shift{
+			MatchNumber: 1,
+			R1scouter:   "Bob1", R2scouter: "Bob2", R3scouter: "Bob3", B1scouter: "Alice1", B2scouter: "Alice2", B3scouter: "Alice3",
+		},
+		Shift{
+			MatchNumber: 2,
+			R1scouter:   "Bob1", R2scouter: "Bob2", R3scouter: "Bob3", B1scouter: "Alice1", B2scouter: "Alice2", B3scouter: "Alice3",
+		},
+	}
+
+	for i := 0; i < len(testDatabase); i++ {
+		err := fixture.db.AddToShift(testDatabase[i])
+		check(t, err, fmt.Sprint("Failed to add shift ", i))
+	}
+
+	correct := []Shift{
+		Shift{
+			MatchNumber: 1,
+			R1scouter:   "Bob1", R2scouter: "Bob2", R3scouter: "Bob3", B1scouter: "Alice1", B2scouter: "Alice2", B3scouter: "Alice3",
+		},
+	}
+
+	got, err := fixture.db.QueryAllShifts(1)
+	check(t, err, "Failed to query shift for match 1")
+
+	if !reflect.DeepEqual(correct, got) {
+		t.Fatalf("Got %#v,\nbut expected %#v.", got, correct)
+	}
+}
+
 func TestQueryStatsDB(t *testing.T) {
 	fixture := createDatabase(t)
 	defer fixture.TearDown()
@@ -475,6 +510,34 @@
 	}
 }
 
+func TestAddReturnShiftDB(t *testing.T) {
+	fixture := createDatabase(t)
+	defer fixture.TearDown()
+
+	correct := []Shift{
+		Shift{
+			MatchNumber: 1,
+			R1scouter:   "Bob1", R2scouter: "Bob2", R3scouter: "Bob3", B1scouter: "Alice1", B2scouter: "Alice2", B3scouter: "Alice3",
+		},
+		Shift{
+			MatchNumber: 2,
+			R1scouter:   "Bob1", R2scouter: "Bob2", R3scouter: "Bob3", B1scouter: "Alice1", B2scouter: "Alice2", B3scouter: "Alice3",
+		},
+	}
+
+	for i := 0; i < len(correct); i++ {
+		err := fixture.db.AddToShift(correct[i])
+		check(t, err, fmt.Sprint("Failed to add shift ", i))
+	}
+
+	got, err := fixture.db.ReturnAllShifts()
+	check(t, err, "Failed ReturnAllShifts()")
+
+	if !reflect.DeepEqual(correct, got) {
+		t.Errorf("Got %#v,\nbut expected %#v.", got, correct)
+	}
+}
+
 func TestReturnRankingsDB(t *testing.T) {
 	fixture := createDatabase(t)
 	defer fixture.TearDown()
diff --git a/y2022/constants.cc b/y2022/constants.cc
index 9d27780..fb91cc5 100644
--- a/y2022/constants.cc
+++ b/y2022/constants.cc
@@ -139,21 +139,23 @@
 
   // Interpolation table for comp and practice robots
   r.shot_interpolation_table = InterpolationTable<Values::ShotParams>({
-      {1.0, {0.0, 19.0}},
-      {1.6, {0.0, 19.0}},
-      {1.9, {0.1, 19.0}},
-      {2.12, {0.15, 18.8}},
-      {2.9, {0.25, 19.2}},
-      {3.2, {0.28, 20.3}},
+      {1.0, {0.05, 19.4}},
+      {1.6, {0.05, 19.4}},
+      {1.9, {0.1, 19.4}},
+      {2.12, {0.13, 19.4}},
+      {2.9, {0.24, 19.9}},
 
-      {3.60, {0.33, 20.3}},
-      {4.9, {0.4, 21.9}},
+      {3.2, {0.26, 20.7}},
+
+      {3.60, {0.33, 20.9}},
+      {4.50, {0.38, 22.5}},
+      {4.9, {0.4, 22.9}},
       {5.4, {0.4, 23.9}},
-      {6.0, {0.40, 25.0}},
-      {7.0, {0.37, 27.1}},
 
-      {7.8, {0.35, 28.0}},
-      {10.0, {0.35, 28.0}},
+      {6.0, {0.40, 25.4}},
+      {7.0, {0.37, 28.1}},
+
+      {10.0, {0.37, 28.1}},
   });
 
   if (false) {
diff --git a/y2022/control_loops/superstructure/catapult/catapult.cc b/y2022/control_loops/superstructure/catapult/catapult.cc
index a04d8c9..612a17a 100644
--- a/y2022/control_loops/superstructure/catapult/catapult.cc
+++ b/y2022/control_loops/superstructure/catapult/catapult.cc
@@ -390,6 +390,9 @@
         } else {
           // TODO(austin): Voltage error?
           CHECK_NOTNULL(catapult_voltage);
+          if (current_horizon_ == 1) {
+            battery_voltage = 12.0;
+          }
           *catapult_voltage = std::max(
               0.0, std::min(12.0, (*solution - 0.0 * next_X(2, 0)) * 12.0 /
                                       std::max(battery_voltage, 8.0)));
diff --git a/y2022/control_loops/superstructure/superstructure.cc b/y2022/control_loops/superstructure/superstructure.cc
index c59737d..24bb00d 100644
--- a/y2022/control_loops/superstructure/superstructure.cc
+++ b/y2022/control_loops/superstructure/superstructure.cc
@@ -281,19 +281,11 @@
       (turret_intake_state_ == RequestedIntake::kFront
            ? constants::Values::kTurretFrontIntakePos()
            : constants::Values::kTurretBackIntakePos());
-  // Turn to the loading position as close to the current position as
-  // possible.
-  turret_loading_position =
-      turret_.estimated_position() +
-      aos::math::NormalizeAngle(turret_loading_position -
-                                turret_.estimated_position());
-  // if out of range, reset back to within +/- pi of zero.
-  if (turret_loading_position > values_->turret_range.upper ||
-      turret_loading_position < values_->turret_range.lower) {
-    turret_loading_position =
-        frc971::zeroing::Wrap(values_->turret_range.middle_soft(),
-                              turret_loading_position, 2.0 * M_PI);
-  }
+  // Turn to the loading position as close to the middle of the range as
+  // possible. Do the unwrapping before we have a ball so we don't have to
+  // unwrap to shoot.
+  turret_loading_position = frc971::zeroing::Wrap(
+      values_->turret_range.middle_soft(), turret_loading_position, 2.0 * M_PI);
 
   turret_loading_goal_buffer.Finish(
       frc971::control_loops::CreateStaticZeroingSingleDOFProfiledSubsystemGoal(
@@ -357,6 +349,7 @@
       }
 
       turret_goal = &turret_loading_goal_buffer.message();
+      aimer_.UpdateTurretGoal(turret_loading_position);
 
       const bool turret_near_goal =
           std::abs(turret_.estimated_position() - turret_loading_position) <
@@ -495,7 +488,7 @@
 
       // Once the shot is complete and the catapult is back to its return
       // position, go back to IDLE
-      if (catapult_.shot_count() > prev_shot_count_ ) {
+      if (catapult_.shot_count() > prev_shot_count_) {
         prev_shot_count_ = catapult_.shot_count();
         fire_ = false;
         discarding_ball_ = false;
diff --git a/y2022/control_loops/superstructure/superstructure_lib_test.cc b/y2022/control_loops/superstructure/superstructure_lib_test.cc
index ff243e2..cbc71d4 100644
--- a/y2022/control_loops/superstructure/superstructure_lib_test.cc
+++ b/y2022/control_loops/superstructure/superstructure_lib_test.cc
@@ -998,7 +998,7 @@
   EXPECT_EQ(superstructure_status_fetcher_->intake_state(),
             IntakeState::INTAKE_BACK_BALL);
   EXPECT_NEAR(superstructure_status_fetcher_->turret()->position(),
-              -constants::Values::kTurretBackIntakePos(), 0.001);
+              constants::Values::kTurretBackIntakePos(), 0.001);
 
   // Since the intake beambreak hasn't triggered in a while, it should realize
   // the ball was lost.
@@ -1010,7 +1010,7 @@
             IntakeState::NO_BALL);
 }
 
-TEST_F(SuperstructureTest, TestTurretWrapsWhenLoading) {
+TEST_F(SuperstructureTest, TestTurretUnWrapsWhenLoading) {
   SetEnabled(true);
   WaitUntilZeroed();
 
@@ -1032,21 +1032,22 @@
   EXPECT_NEAR(superstructure_status_fetcher_->turret()->position(), kTurretGoal,
               0.001);
 
-  superstructure_plant_.set_intake_beambreak_front(true);
+  superstructure_plant_.set_intake_beambreak_back(true);
   RunFor(dt() * 2);
 
   ASSERT_TRUE(superstructure_status_fetcher_.Fetch());
   EXPECT_EQ(superstructure_status_fetcher_->state(),
             SuperstructureState::TRANSFERRING);
   EXPECT_EQ(superstructure_status_fetcher_->intake_state(),
-            IntakeState::INTAKE_FRONT_BALL);
+            IntakeState::INTAKE_BACK_BALL);
 
   RunFor(std::chrono::seconds(3));
 
   ASSERT_TRUE(superstructure_status_fetcher_.Fetch());
   EXPECT_NEAR(superstructure_status_fetcher_->turret()->position(),
-              -constants::Values::kTurretFrontIntakePos() - 2.0 * M_PI, 0.001);
-  // it chooses -pi because -pi is closer to -4 than positive pi
+              constants::Values::kTurretBackIntakePos(), 0.001);
+  // It goes to -pi instead of +pi because -pi is closer to the center of the
+  // range, which is at -1.675.
 }
 
 // Make sure that the front and back intakes are never switched
diff --git a/y2022/control_loops/superstructure/turret/aiming.h b/y2022/control_loops/superstructure/turret/aiming.h
index 4eabe3e..a762298 100644
--- a/y2022/control_loops/superstructure/turret/aiming.h
+++ b/y2022/control_loops/superstructure/turret/aiming.h
@@ -24,6 +24,9 @@
 
   void Update(const Status *status, ShotMode shot_mode);
 
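+  // Overwrites the stored turret goal, e.g. to keep it in sync with the
+  // loading position while we aren't aiming at the target.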
+  void UpdateTurretGoal(double turret_goal) {
+    goal_.mutable_message()->mutate_unsafe_goal(turret_goal);
+  }
   const Goal *TurretGoal() const { return &goal_.message(); }
 
   // Returns the distance to the goal, in meters.
diff --git a/y2022/vision/BUILD b/y2022/vision/BUILD
index 65ab20c..6325234 100644
--- a/y2022/vision/BUILD
+++ b/y2022/vision/BUILD
@@ -317,6 +317,7 @@
     deps = [
         ":blob_detector_lib",
         ":calibration_data",
+        ":camera_reader_lib",
         ":target_estimator_lib",
         "//aos:init",
         "//aos/events:shm_event_loop",
diff --git a/y2022/vision/blob_detector.cc b/y2022/vision/blob_detector.cc
index 93c72d7..2f90513 100644
--- a/y2022/vision/blob_detector.cc
+++ b/y2022/vision/blob_detector.cc
@@ -133,7 +133,7 @@
   }
 
   // Threshold for mean distance from a blob centroid to a circle.
-  constexpr double kCircleDistanceThreshold = 1.0;
+  constexpr double kCircleDistanceThreshold = 2.0;
   // We should only expect to see blobs between these angles on a circle.
   constexpr double kDegToRad = M_PI / 180.0;
   constexpr double kMinBlobAngle = 50.0 * kDegToRad;
diff --git a/y2022/vision/camera_reader.cc b/y2022/vision/camera_reader.cc
index 92d3727..0af4afc 100644
--- a/y2022/vision/camera_reader.cc
+++ b/y2022/vision/camera_reader.cc
@@ -21,12 +21,11 @@
 
 using namespace frc971::vision;
 
-const calibration::CameraCalibration *CameraReader::FindCameraCalibration()
-    const {
-  const std::string_view node_name = event_loop_->node()->name()->string_view();
-  const int team_number = aos::network::GetTeamNumber();
+const calibration::CameraCalibration *CameraReader::FindCameraCalibration(
+    const calibration::CalibrationData *calibration_data,
+    std::string_view node_name, int team_number) {
   for (const calibration::CameraCalibration *candidate :
-       *calibration_data_->camera_calibrations()) {
+       *calibration_data->camera_calibrations()) {
     if (candidate->node_name()->string_view() != node_name) {
       continue;
     }
@@ -92,9 +91,7 @@
 
 void CameraReader::ProcessImage(cv::Mat image_mat_distorted,
                                 int64_t image_monotonic_timestamp_ns) {
-  cv::Mat image_mat;
-  cv::undistort(image_mat_distorted, image_mat, CameraIntrinsics(),
-                CameraDistCoeffs());
+  cv::Mat image_mat = UndistortImage(image_mat_distorted, undistort_maps_);
 
   BlobDetector::BlobResult blob_result;
   BlobDetector::ExtractBlobs(image_mat, &blob_result);
diff --git a/y2022/vision/camera_reader.h b/y2022/vision/camera_reader.h
index 7128890..8317c09 100644
--- a/y2022/vision/camera_reader.h
+++ b/y2022/vision/camera_reader.h
@@ -29,12 +29,73 @@
 // TODO<jim>: Probably need to break out LED control to separate process
 class CameraReader {
  public:
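+  // Finds the calibration entry matching the given node name and team number.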
+  static const calibration::CameraCalibration *FindCameraCalibration(
+      const calibration::CalibrationData *calibration_data,
+      std::string_view node_name, int team_number);
+
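+  // Returns the 3x3 camera intrinsics as a CV_64F cv::Mat.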
+  static cv::Mat CameraIntrinsics(
+      const calibration::CameraCalibration *camera_calibration) {
+    cv::Mat result(3, 3, CV_32F,
+                   const_cast<void *>(static_cast<const void *>(
+                       camera_calibration->intrinsics()->data())));
+    result.convertTo(result, CV_64F);
+    CHECK_EQ(result.total(), camera_calibration->intrinsics()->size());
+    return result;
+  }
+
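+  // Returns the 4x4 camera extrinsics, using the turret extrinsics when
+  // present and the fixed extrinsics otherwise.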
+  static cv::Mat CameraExtrinsics(
+      const calibration::CameraCalibration *camera_calibration) {
+    // TODO(james): What's the principled way to handle non-z-axis turrets?
+    const frc971::vision::calibration::TransformationMatrix *transform =
+        camera_calibration->has_turret_extrinsics()
+            ? camera_calibration->turret_extrinsics()
+            : camera_calibration->fixed_extrinsics();
+
+    cv::Mat result(4, 4, CV_32F,
+                   const_cast<void *>(
+                       static_cast<const void *>(transform->data()->data())));
+    result.convertTo(result, CV_64F);
+    CHECK_EQ(result.total(), transform->data()->size());
+    return result;
+  }
+
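+  // Returns the 5x1 distortion coefficients as a cv::Mat.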
+  static cv::Mat CameraDistCoeffs(
+      const calibration::CameraCalibration *camera_calibration) {
+    const cv::Mat result(5, 1, CV_32F,
+                         const_cast<void *>(static_cast<const void *>(
+                             camera_calibration->dist_coeffs()->data())));
+    CHECK_EQ(result.total(), camera_calibration->dist_coeffs()->size());
+    return result;
+  }
+
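+  // Precomputes the undistortion maps for a 640x480 image so each frame can
+  // be undistorted with a cheap remap instead of a full cv::undistort.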
+  static std::pair<cv::Mat, cv::Mat> ComputeUndistortMaps(
+      const cv::Mat intrinsics, const cv::Mat dist_coeffs) {
+    std::pair<cv::Mat, cv::Mat> undistort_maps;
+    static const cv::Size kImageSize = {640, 480};
+    cv::initUndistortRectifyMap(intrinsics, dist_coeffs, cv::Mat(), intrinsics,
+                                kImageSize, CV_16SC2, undistort_maps.first,
+                                undistort_maps.second);
+    return undistort_maps;
+  }
+
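+  // Undistorts an image using the maps from ComputeUndistortMaps().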
+  static cv::Mat UndistortImage(cv::Mat image_distorted,
+                                std::pair<cv::Mat, cv::Mat> undistort_maps) {
+    cv::Mat image;
+    cv::remap(image_distorted, image, undistort_maps.first,
+              undistort_maps.second, cv::INTER_LINEAR);
+    return image;
+  }
+
   CameraReader(aos::ShmEventLoop *event_loop,
                const calibration::CalibrationData *calibration_data,
                V4L2Reader *reader)
       : event_loop_(event_loop),
         calibration_data_(calibration_data),
-        camera_calibration_(FindCameraCalibration()),
+        camera_calibration_(FindCameraCalibration(
+            calibration_data_, event_loop_->node()->name()->string_view(),
+            aos::network::GetTeamNumber())),
+        undistort_maps_(
+            ComputeUndistortMaps(CameraIntrinsics(), CameraDistCoeffs())),
         reader_(reader),
         image_sender_(event_loop->MakeSender<CameraImage>("/camera")),
         target_estimator_(CameraIntrinsics(), CameraExtrinsics()),
@@ -60,8 +121,6 @@
   double GetDutyCycle() { return duty_cycle_; }
 
  private:
-  const calibration::CameraCalibration *FindCameraCalibration() const;
-
   // Processes an image (including sending the results).
   void ProcessImage(cv::Mat image_mat_distorted,
                     int64_t image_monotonic_timestamp_ns);
@@ -70,40 +129,21 @@
   void ReadImage();
 
   cv::Mat CameraIntrinsics() const {
-    cv::Mat result(3, 3, CV_32F,
-                   const_cast<void *>(static_cast<const void *>(
-                       camera_calibration_->intrinsics()->data())));
-    result.convertTo(result, CV_64F);
-    CHECK_EQ(result.total(), camera_calibration_->intrinsics()->size());
-    return result;
+    return CameraIntrinsics(camera_calibration_);
   }
 
   cv::Mat CameraExtrinsics() const {
-    // TODO(james): What's the principled way to handle non-z-axis turrets?
-    const frc971::vision::calibration::TransformationMatrix *transform =
-        camera_calibration_->has_turret_extrinsics()
-            ? camera_calibration_->turret_extrinsics()
-            : camera_calibration_->fixed_extrinsics();
-
-    cv::Mat result(4, 4, CV_32F,
-                   const_cast<void *>(
-                       static_cast<const void *>(transform->data()->data())));
-    result.convertTo(result, CV_64F);
-    CHECK_EQ(result.total(), transform->data()->size());
-    return result;
+    return CameraExtrinsics(camera_calibration_);
   }
 
   cv::Mat CameraDistCoeffs() const {
-    const cv::Mat result(5, 1, CV_32F,
-                         const_cast<void *>(static_cast<const void *>(
-                             camera_calibration_->dist_coeffs()->data())));
-    CHECK_EQ(result.total(), camera_calibration_->dist_coeffs()->size());
-    return result;
+    return CameraDistCoeffs(camera_calibration_);
   }
 
   aos::ShmEventLoop *const event_loop_;
   const calibration::CalibrationData *const calibration_data_;
   const calibration::CameraCalibration *const camera_calibration_;
+  std::pair<cv::Mat, cv::Mat> undistort_maps_;
   V4L2Reader *const reader_;
   aos::Sender<CameraImage> image_sender_;
   TargetEstimator target_estimator_;
diff --git a/y2022/vision/target_estimator.cc b/y2022/vision/target_estimator.cc
index 9eef390..1447d81 100644
--- a/y2022/vision/target_estimator.cc
+++ b/y2022/vision/target_estimator.cc
@@ -378,16 +378,16 @@
       [](const std::pair<size_t, size_t> &a,
          const std::pair<size_t, size_t> &b) { return a.first < b.first; });
 
-  size_t middle_tape_index = 1000;
+  std::optional<size_t> middle_tape_index = std::nullopt;
   for (size_t i = 0; i < tape_indices.size(); ++i) {
     if (tape_indices[i].second == middle_blob_index_) {
       middle_tape_index = i;
     }
   }
-  CHECK_NE(middle_tape_index, 1000) << "Failed to find middle tape";
+  CHECK(middle_tape_index.has_value()) << "Failed to find middle tape";
 
   if (VLOG_IS_ON(2)) {
-    LOG(INFO) << "Middle tape is " << middle_tape_index << ", blob "
+    LOG(INFO) << "Middle tape is " << *middle_tape_index << ", blob "
               << middle_blob_index_;
     for (size_t i = 0; i < tape_indices.size(); ++i) {
       const auto distance = DistanceFromTapeIndex(
@@ -400,7 +400,7 @@
 
   {
     size_t offset = 0;
-    for (size_t i = middle_tape_index + 1; i < tape_indices.size(); ++i) {
+    for (size_t i = *middle_tape_index + 1; i < tape_indices.size(); ++i) {
       tape_indices[i].first -= offset;
 
       if (tape_indices[i].first > tape_indices[i - 1].first + 1) {
@@ -412,7 +412,7 @@
   }
 
   if (VLOG_IS_ON(2)) {
-    LOG(INFO) << "Middle tape is " << middle_tape_index << ", blob "
+    LOG(INFO) << "Middle tape is " << *middle_tape_index << ", blob "
               << middle_blob_index_;
     for (size_t i = 0; i < tape_indices.size(); ++i) {
       const auto distance = DistanceFromTapeIndex(
@@ -425,7 +425,7 @@
 
   {
     size_t offset = 0;
-    for (size_t i = middle_tape_index; i > 0; --i) {
+    for (size_t i = *middle_tape_index; i > 0; --i) {
       tape_indices[i - 1].first -= offset;
 
       if (tape_indices[i - 1].first + 1 < tape_indices[i].first) {
@@ -440,7 +440,7 @@
   }
 
   if (VLOG_IS_ON(2)) {
-    LOG(INFO) << "Middle tape is " << middle_tape_index << ", blob "
+    LOG(INFO) << "Middle tape is " << *middle_tape_index << ", blob "
               << middle_blob_index_;
     for (size_t i = 0; i < tape_indices.size(); ++i) {
       const auto distance = DistanceFromTapeIndex(
@@ -566,11 +566,11 @@
     size_t blob_index, const std::vector<cv::Point_<S>> &tape_points) const {
   auto distance = cv::Point_<S>(std::numeric_limits<S>::infinity(),
                                 std::numeric_limits<S>::infinity());
-  size_t final_match = 255;
+  std::optional<size_t> final_match = std::nullopt;
   if (blob_index == middle_blob_index_) {
     // Fix the middle blob so the solver can't go too far off
     final_match = tape_points.size() / 2;
-    distance = DistanceFromTapeIndex(blob_index, final_match, tape_points);
+    distance = DistanceFromTapeIndex(blob_index, *final_match, tape_points);
   } else {
     // Give the other blob_stats some freedom in case some are split into pieces
     for (auto it = tape_points.begin(); it < tape_points.end(); it++) {
@@ -585,11 +585,11 @@
     }
   }
 
-  VLOG(2) << "Matched index " << blob_index << " to " << final_match
+  CHECK(final_match.has_value());
+  VLOG(2) << "Matched index " << blob_index << " to " << *final_match
           << " distance " << distance.x << " " << distance.y;
-  CHECK_NE(final_match, 255);
 
-  return final_match;
+  return *final_match;
 }
 
 void TargetEstimator::DrawProjectedHub(
diff --git a/y2022/vision/viewer.cc b/y2022/vision/viewer.cc
index 446f1f6..e455f66 100644
--- a/y2022/vision/viewer.cc
+++ b/y2022/vision/viewer.cc
@@ -13,6 +13,7 @@
 #include "frc971/vision/vision_generated.h"
 #include "y2022/vision/blob_detector.h"
 #include "y2022/vision/calibration_data.h"
+#include "y2022/vision/camera_reader.h"
 #include "y2022/vision/target_estimate_generated.h"
 #include "y2022/vision/target_estimator.h"
 
@@ -213,51 +214,24 @@
   const aos::FlatbufferSpan<calibration::CalibrationData> calibration_data(
       CalibrationData());
 
-  const calibration::CameraCalibration *calibration = nullptr;
-  for (const calibration::CameraCalibration *candidate :
-       *calibration_data.message().camera_calibrations()) {
-    if ((candidate->node_name()->string_view() == FLAGS_calibration_node) &&
-        (candidate->team_number() == FLAGS_calibration_team_number)) {
-      calibration = candidate;
-      break;
-    }
-  }
+  const calibration::CameraCalibration *calibration =
+      CameraReader::FindCameraCalibration(&calibration_data.message(),
+                                          FLAGS_calibration_node,
+                                          FLAGS_calibration_team_number);
+  const auto intrinsics = CameraReader::CameraIntrinsics(calibration);
+  const auto extrinsics = CameraReader::CameraExtrinsics(calibration);
+  const auto dist_coeffs = CameraReader::CameraDistCoeffs(calibration);
 
-  CHECK(calibration) << "No calibration data found for node \""
-                     << FLAGS_calibration_node << "\" with team number "
-                     << FLAGS_calibration_team_number;
-
-  const auto intrinsics_float = cv::Mat(
-      3, 3, CV_32F,
-      const_cast<void *>(
-          static_cast<const void *>(calibration->intrinsics()->data())));
-  cv::Mat intrinsics;
-  intrinsics_float.convertTo(intrinsics, CV_64F);
-
-  const frc971::vision::calibration::TransformationMatrix *transform =
-      calibration->has_turret_extrinsics() ? calibration->turret_extrinsics()
-                                           : calibration->fixed_extrinsics();
-
-  const auto extrinsics_float = cv::Mat(
-      4, 4, CV_32F,
-      const_cast<void *>(static_cast<const void *>(transform->data()->data())));
-  cv::Mat extrinsics;
-  extrinsics_float.convertTo(extrinsics, CV_64F);
-
-  const auto dist_coeffs_float = cv::Mat(
-      5, 1, CV_32F,
-      const_cast<void *>(
-          static_cast<const void *>(calibration->dist_coeffs()->data())));
-  cv::Mat dist_coeffs;
-  dist_coeffs_float.convertTo(dist_coeffs, CV_64F);
+  // Compute the undistortion maps once for efficiency.
+  const auto undistort_maps =
+      CameraReader::ComputeUndistortMaps(intrinsics, dist_coeffs);
 
   TargetEstimator estimator(intrinsics, extrinsics);
 
   for (auto it = file_list.begin() + FLAGS_skip; it < file_list.end(); it++) {
     LOG(INFO) << "Reading file " << (it - file_list.begin()) << ": " << *it;
-    cv::Mat image_mat_distorted = cv::imread(it->c_str());
-    cv::Mat image_mat;
-    cv::undistort(image_mat_distorted, image_mat, intrinsics, dist_coeffs);
+    cv::Mat image_mat =
+        CameraReader::UndistortImage(cv::imread(it->c_str()), undistort_maps);
 
     BlobDetector::BlobResult blob_result;
     blob_result.binarized_image =
diff --git a/y2022/vision/viewer_replay.cc b/y2022/vision/viewer_replay.cc
index b2d3464..5d09d55 100644
--- a/y2022/vision/viewer_replay.cc
+++ b/y2022/vision/viewer_replay.cc
@@ -194,6 +194,8 @@
 
         bool use_image = true;
         if (FLAGS_detected_only || FLAGS_filtered_only) {
+          // TODO(milind): if target estimation is added here in the future,
+          // the image will need to be undistorted first.
           BlobDetector::BlobResult blob_result;
           BlobDetector::ExtractBlobs(image_mat, &blob_result);
 
diff --git a/y2022/www/field_handler.ts b/y2022/www/field_handler.ts
index 22989be..88c2864 100644
--- a/y2022/www/field_handler.ts
+++ b/y2022/www/field_handler.ts
@@ -161,9 +161,12 @@
 
   drawField(): void {
     const ctx = this.canvas.getContext('2d');
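+    // Mirror the field image horizontally when drawing it.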
+    ctx.save();
+    ctx.scale(-1.0, 1.0);
     ctx.drawImage(
         this.fieldImage, 0, 0, this.fieldImage.width, this.fieldImage.height,
         -FIELD_EDGE_X, -FIELD_SIDE_Y, FIELD_LENGTH, FIELD_WIDTH);
+    ctx.restore();
   }
 
   drawCamera(