Merge "Add config validator to y2023"
diff --git a/.bazelignore b/.bazelignore
index 784e4c1..2409a99 100644
--- a/.bazelignore
+++ b/.bazelignore
@@ -4,7 +4,6 @@
 scouting/www/counter_button/node_modules
 scouting/www/driver_ranking/node_modules
 scouting/www/entry/node_modules
-scouting/www/import_match_list/node_modules
 scouting/www/match_list/node_modules
 scouting/www/notes/node_modules
 scouting/www/rpc/node_modules
diff --git a/WORKSPACE b/WORKSPACE
index 817e5ba..a7630d1 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -959,7 +959,6 @@
         "@//scouting/www/counter_button:package.json",
         "@//scouting/www/driver_ranking:package.json",
         "@//scouting/www/entry:package.json",
-        "@//scouting/www/import_match_list:package.json",
         "@//scouting/www/match_list:package.json",
         "@//scouting/www/notes:package.json",
         "@//scouting/www/rpc:package.json",
diff --git a/frc971/control_loops/control_loops.fbs b/frc971/control_loops/control_loops.fbs
index 243e2cb..da2dc05 100644
--- a/frc971/control_loops/control_loops.fbs
+++ b/frc971/control_loops/control_loops.fbs
@@ -86,6 +86,13 @@
   encoder:double (id: 0);
 }
 
+// An enum to represent the different types of errors
+// a zeroing estimator could have.
+enum ZeroingError : short {
+  OFFSET_MOVED_TOO_FAR,
+  LOST_ABSOLUTE_ENCODER
+}
+
 // The internal state of a zeroing estimator.
 table EstimatorState {
   // If true, there has been a fatal error for the estimator.
@@ -114,6 +121,9 @@
   // The estimated absolute position of the encoder.  This is filtered, so it
   // can be easily used when zeroing.
   absolute_position:double (id: 4);
+
+  // If errored, this contains the causes for the error.
+  errors: [ZeroingError] (id: 5);
 }
 
 // The internal state of a zeroing estimator.
@@ -128,6 +138,9 @@
   // The estimated absolute position of the encoder.  This is filtered, so it
   // can be easily used when zeroing.
   absolute_position:double (id: 3);
+
+  // If errored, this contains the causes for the error.
+  errors: [ZeroingError] (id: 4);
 }
 
 // The internal state of a zeroing estimator.
@@ -145,8 +158,12 @@
 
   // Estimated absolute position of the single turn absolute encoder.
   single_turn_absolute_position:double (id: 4);
+
+  // If errored, this contains the causes for the error.
+  errors: [ZeroingError] (id: 5);
 }
 
 table RelativeEncoderEstimatorState {
   // If true, there has been a fatal error for the estimator.
   error:bool (id: 0);
diff --git a/frc971/zeroing/BUILD b/frc971/zeroing/BUILD
index 9ec2772..d50f181 100644
--- a/frc971/zeroing/BUILD
+++ b/frc971/zeroing/BUILD
@@ -80,6 +80,7 @@
     target_compatible_with = ["@platforms//os:linux"],
     deps = [
         ":wrap",
+        "//aos/containers:error_list",
         "//aos/logging",
         "//frc971:constants",
         "//frc971/control_loops:control_loops_fbs",
diff --git a/frc971/zeroing/absolute_and_absolute_encoder.cc b/frc971/zeroing/absolute_and_absolute_encoder.cc
index 40b0519..03ef3f1 100644
--- a/frc971/zeroing/absolute_and_absolute_encoder.cc
+++ b/frc971/zeroing/absolute_and_absolute_encoder.cc
@@ -71,6 +71,7 @@
     if (zeroed_) {
       VLOG(1) << "NAN on one of the absolute encoders.";
       error_ = true;
+      errors_.Set(ZeroingError::LOST_ABSOLUTE_ENCODER);
     } else {
       ++nan_samples_;
       VLOG(1) << "NAN on one of the absolute encoders while zeroing"
@@ -78,6 +79,7 @@
       if (nan_samples_ >= constants_.average_filter_size) {
         error_ = true;
         zeroed_ = true;
+        errors_.Set(ZeroingError::LOST_ABSOLUTE_ENCODER);
       }
     }
     // Throw some dummy values in for now.
@@ -189,11 +191,6 @@
          (-constants_.single_turn_measured_absolute_position +
           what_Unwrap_added));
 
-    /*
-    filtered_single_turn_absolute_encoder_ =
-        sample.encoder - single_turn_to_relative_encoder_offset_;
-    */
-
     if (!zeroed_) {
       first_offset_ = offset_;
     }
@@ -209,6 +206,7 @@
                 constants_.allowable_encoder_error *
                     constants_.one_revolution_distance);
         error_ = true;
+        errors_.Set(ZeroingError::OFFSET_MOVED_TOO_FAR);
       }
 
       zeroed_ = true;
@@ -222,8 +220,12 @@
 flatbuffers::Offset<AbsoluteAndAbsoluteEncoderZeroingEstimator::State>
 AbsoluteAndAbsoluteEncoderZeroingEstimator::GetEstimatorState(
     flatbuffers::FlatBufferBuilder *fbb) const {
+  flatbuffers::Offset<flatbuffers::Vector<ZeroingError>> errors_offset =
+      errors_.ToFlatbuffer(fbb);
+
   State::Builder builder(*fbb);
   builder.add_error(error_);
+  builder.add_errors(errors_offset);
   builder.add_zeroed(zeroed_);
   builder.add_position(position_);
   builder.add_absolute_position(filtered_absolute_encoder_);
diff --git a/frc971/zeroing/absolute_and_absolute_encoder.h b/frc971/zeroing/absolute_and_absolute_encoder.h
index 499f7d1..509d5c5 100644
--- a/frc971/zeroing/absolute_and_absolute_encoder.h
+++ b/frc971/zeroing/absolute_and_absolute_encoder.h
@@ -5,6 +5,7 @@
 
 #include "flatbuffers/flatbuffers.h"
 
+#include "aos/containers/error_list.h"
 #include "frc971/zeroing/zeroing.h"
 
 namespace frc971 {
@@ -73,6 +74,8 @@
   bool zeroed_;
   // Marker to track whether an error has occurred.
   bool error_;
+  // Marker to track what kind of error has occurred.
+  aos::ErrorList<ZeroingError> errors_;
   // The first valid offset we recorded. This is only set after zeroed_ first
   // changes to true.
   double first_offset_;
diff --git a/frc971/zeroing/absolute_and_absolute_encoder_test.cc b/frc971/zeroing/absolute_and_absolute_encoder_test.cc
index cc1872d..59b421a 100644
--- a/frc971/zeroing/absolute_and_absolute_encoder_test.cc
+++ b/frc971/zeroing/absolute_and_absolute_encoder_test.cc
@@ -1,15 +1,15 @@
 #include "frc971/zeroing/absolute_and_absolute_encoder.h"
 
-#include "gtest/gtest.h"
-
 #include "frc971/zeroing/zeroing_test.h"
+#include "glog/logging.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
 
 namespace frc971 {
 namespace zeroing {
 namespace testing {
 
 using constants::AbsoluteAndAbsoluteEncoderZeroingConstants;
-using EstimatorState = AbsoluteAndAbsoluteEncoderZeroingEstimator::State;
 
 class AbsoluteAndAbsoluteEncoderZeroingTest : public ZeroingTest {
  protected:
@@ -17,7 +17,7 @@
               AbsoluteAndAbsoluteEncoderZeroingEstimator *estimator,
               double new_position) {
     simulator->MoveTo(new_position);
-    FBB fbb;
+    flatbuffers::FlatBufferBuilder fbb;
     estimator->UpdateEstimate(
         *simulator->FillSensorValues<AbsoluteAndAbsolutePosition>(&fbb));
   }
@@ -107,7 +107,7 @@
   AbsoluteAndAbsoluteEncoderZeroingEstimator estimator(constants);
 
   // We tolerate a couple NANs before we start.
-  FBB fbb;
+  flatbuffers::FlatBufferBuilder fbb;
   fbb.Finish(CreateAbsoluteAndAbsolutePosition(
       fbb, 0.0, ::std::numeric_limits<double>::quiet_NaN(), 0.0));
   for (size_t i = 0; i < kSampleSize - 1; ++i) {
@@ -180,7 +180,7 @@
 
   AbsoluteAndAbsoluteEncoderZeroingEstimator estimator(constants);
 
-  FBB fbb;
+  flatbuffers::FlatBufferBuilder fbb;
   fbb.Finish(CreateAbsoluteAndAbsolutePosition(
       fbb, 0.0, ::std::numeric_limits<double>::quiet_NaN(), 0.0));
   const auto sensor_values =
@@ -192,6 +192,14 @@
 
   estimator.UpdateEstimate(*sensor_values);
   ASSERT_TRUE(estimator.error());
+
+  fbb.Finish(estimator.GetEstimatorState(&fbb));
+  const AbsoluteAndAbsoluteEncoderEstimatorState *state =
+      flatbuffers::GetRoot<AbsoluteAndAbsoluteEncoderEstimatorState>(
+          fbb.GetBufferPointer());
+
+  ASSERT_GT(state->errors()->size(), 0);
+  EXPECT_EQ(state->errors()->Get(0), ZeroingError::LOST_ABSOLUTE_ENCODER);
 }
 
 TEST_F(AbsoluteAndAbsoluteEncoderZeroingTest,
@@ -234,12 +242,13 @@
   ASSERT_TRUE(estimator.zeroed());
   EXPECT_DOUBLE_EQ(start_pos, estimator.offset());
 
-  FBB fbb;
+  flatbuffers::FlatBufferBuilder fbb;
 
   fbb.Finish(estimator.GetEstimatorState(&fbb));
 
-  const EstimatorState *state =
-      flatbuffers::GetRoot<EstimatorState>(fbb.GetBufferPointer());
+  const AbsoluteAndAbsoluteEncoderEstimatorState *state =
+      flatbuffers::GetRoot<AbsoluteAndAbsoluteEncoderEstimatorState>(
+          fbb.GetBufferPointer());
 
   EXPECT_NEAR(state->position(), position, 1e-10);
 
@@ -253,6 +262,74 @@
   EXPECT_NEAR(state->single_turn_absolute_position(), 0.3, 1e-10);
 }
 
+// Tests that errors() adds the OFFSET_MOVED_TOO_FAR error when we move too far.
+TEST_F(AbsoluteAndAbsoluteEncoderZeroingTest,
+       TestAbsoluteAndAbsoluteEncoderZeroingStateErrors) {
+  const double full_range = 4.0;
+  const double distance_per_revolution = 1.0;
+  const double single_turn_distance_per_revolution = full_range;
+  const double start_pos = 2.1;
+  const double single_turn_middle_position = full_range * 0.5;
+  const double measured_absolute_position = 0.3 * distance_per_revolution;
+  const double single_turn_measured_absolute_position =
+      0.4 * single_turn_distance_per_revolution;
+
+  AbsoluteAndAbsoluteEncoderZeroingConstants constants{
+      kSampleSize,
+      distance_per_revolution,
+      measured_absolute_position,
+      single_turn_distance_per_revolution,
+      single_turn_measured_absolute_position,
+      single_turn_middle_position,
+      0.1,
+      kMovingBufferSize,
+      kIndexErrorFraction};
+
+  PositionSensorSimulator sim(distance_per_revolution,
+                              single_turn_distance_per_revolution);
+  sim.Initialize(start_pos, distance_per_revolution / 3.0, 0.0,
+                 measured_absolute_position,
+                 single_turn_measured_absolute_position);
+
+  AbsoluteAndAbsoluteEncoderZeroingEstimator estimator(constants);
+
+  const double position = 2.7;
+
+  for (size_t i = 0; i < kSampleSize + kMovingBufferSize - 1; ++i) {
+    MoveTo(&sim, &estimator, position);
+    ASSERT_FALSE(estimator.zeroed());
+  }
+  MoveTo(&sim, &estimator, position);
+  ASSERT_TRUE(estimator.zeroed());
+  EXPECT_DOUBLE_EQ(start_pos, estimator.offset());
+
+  // If the ratios suddenly get very messed up, the estimator should error.
+  flatbuffers::FlatBufferBuilder fbb;
+  fbb.Finish(CreateAbsoluteAndAbsolutePosition(fbb, 0.0, 0.0, 3.0));
+
+  const auto sensor_values =
+      flatbuffers::GetRoot<AbsoluteAndAbsolutePosition>(fbb.GetBufferPointer());
+
+  ASSERT_FALSE(estimator.error());
+
+  for (size_t i = 0; i < kSampleSize + kMovingBufferSize - 1; ++i) {
+    estimator.UpdateEstimate(*sensor_values);
+  }
+  ASSERT_TRUE(estimator.error());
+
+  flatbuffers::FlatBufferBuilder fbb2;
+  fbb2.Finish(estimator.GetEstimatorState(&fbb2));
+  const AbsoluteAndAbsoluteEncoderEstimatorState *state =
+      flatbuffers::GetRoot<AbsoluteAndAbsoluteEncoderEstimatorState>(
+          fbb2.GetBufferPointer());
+
+  for (ZeroingError err : *state->errors()) {
+    LOG(INFO) << "error: " << EnumNameZeroingError(err);
+  }
+  EXPECT_THAT(*state->errors(),
+              ::testing::ElementsAre(ZeroingError::OFFSET_MOVED_TOO_FAR));
+}
+
 }  // namespace testing
 }  // namespace zeroing
 }  // namespace frc971
diff --git a/frc971/zeroing/absolute_encoder.cc b/frc971/zeroing/absolute_encoder.cc
index ffdf9da..d0cd0d9 100644
--- a/frc971/zeroing/absolute_encoder.cc
+++ b/frc971/zeroing/absolute_encoder.cc
@@ -3,9 +3,9 @@
 #include <cmath>
 #include <numeric>
 
-#include "glog/logging.h"
-
+#include "aos/containers/error_list.h"
 #include "frc971/zeroing/wrap.h"
+#include "glog/logging.h"
 
 namespace frc971 {
 namespace zeroing {
@@ -29,7 +29,6 @@
   move_detector_.Reset();
 }
 
-
 // The math here is a bit backwards, but I think it'll be less error prone that
 // way and more similar to the version with a pot as well.
 //
@@ -49,11 +48,13 @@
   if (::std::isnan(info.absolute_encoder())) {
     if (zeroed_) {
       VLOG(1) << "NAN on absolute encoder.";
+      errors_.Set(ZeroingError::LOST_ABSOLUTE_ENCODER);
       error_ = true;
     } else {
       ++nan_samples_;
       VLOG(1) << "NAN on absolute encoder while zeroing " << nan_samples_;
       if (nan_samples_ >= constants_.average_filter_size) {
+        errors_.Set(ZeroingError::LOST_ABSOLUTE_ENCODER);
         error_ = true;
         zeroed_ = true;
       }
@@ -152,6 +153,7 @@
                 << ", current " << offset_ << ", allowable change: "
                 << constants_.allowable_encoder_error *
                        constants_.one_revolution_distance;
+        errors_.Set(ZeroingError::OFFSET_MOVED_TOO_FAR);
         error_ = true;
       }
 
@@ -166,11 +168,15 @@
 flatbuffers::Offset<AbsoluteEncoderZeroingEstimator::State>
 AbsoluteEncoderZeroingEstimator::GetEstimatorState(
     flatbuffers::FlatBufferBuilder *fbb) const {
+  flatbuffers::Offset<flatbuffers::Vector<ZeroingError>> errors_offset =
+      errors_.ToFlatbuffer(fbb);
+
   State::Builder builder(*fbb);
   builder.add_error(error_);
   builder.add_zeroed(zeroed_);
   builder.add_position(position_);
   builder.add_absolute_position(filtered_absolute_encoder_);
+  builder.add_errors(errors_offset);
   return builder.Finish();
 }
 
diff --git a/frc971/zeroing/absolute_encoder.h b/frc971/zeroing/absolute_encoder.h
index 0021e13..df40ec3 100644
--- a/frc971/zeroing/absolute_encoder.h
+++ b/frc971/zeroing/absolute_encoder.h
@@ -85,6 +85,9 @@
 
   // The filtered position.
   double position_ = 0.0;
+
+  // Marker to track what kind of error has occurred.
+  aos::ErrorList<ZeroingError> errors_;
 };
 
 }  // namespace zeroing
diff --git a/frc971/zeroing/absolute_encoder_test.cc b/frc971/zeroing/absolute_encoder_test.cc
index 38ce069..ce485eb 100644
--- a/frc971/zeroing/absolute_encoder_test.cc
+++ b/frc971/zeroing/absolute_encoder_test.cc
@@ -1,8 +1,8 @@
 #include "frc971/zeroing/absolute_encoder.h"
 
-#include "gtest/gtest.h"
-
 #include "frc971/zeroing/zeroing_test.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
 
 namespace frc971 {
 namespace zeroing {
@@ -15,7 +15,7 @@
   void MoveTo(PositionSensorSimulator *simulator,
               AbsoluteEncoderZeroingEstimator *estimator, double new_position) {
     simulator->MoveTo(new_position);
-    FBB fbb;
+    flatbuffers::FlatBufferBuilder fbb;
     estimator->UpdateEstimate(
         *simulator->FillSensorValues<AbsolutePosition>(&fbb));
   }
@@ -71,7 +71,7 @@
   AbsoluteEncoderZeroingEstimator estimator(constants);
 
   // We tolerate a couple NANs before we start.
-  FBB fbb;
+  flatbuffers::FlatBufferBuilder fbb;
   fbb.Finish(CreateAbsolutePosition(
       fbb, 0.0, ::std::numeric_limits<double>::quiet_NaN()));
   const auto sensor_values =
@@ -126,7 +126,7 @@
 
   AbsoluteEncoderZeroingEstimator estimator(constants);
 
-  FBB fbb;
+  flatbuffers::FlatBufferBuilder fbb;
   fbb.Finish(CreateAbsolutePosition(
       fbb, 0.0, ::std::numeric_limits<double>::quiet_NaN()));
   const auto sensor_values =
@@ -138,6 +138,16 @@
 
   estimator.UpdateEstimate(*sensor_values);
   ASSERT_TRUE(estimator.error());
+
+  flatbuffers::FlatBufferBuilder fbb2;
+  fbb2.Finish(estimator.GetEstimatorState(&fbb2));
+
+  const AbsoluteEncoderEstimatorState *state =
+      flatbuffers::GetRoot<AbsoluteEncoderEstimatorState>(
+          fbb2.GetBufferPointer());
+
+  EXPECT_THAT(*state->errors(),
+              ::testing::ElementsAre(ZeroingError::LOST_ABSOLUTE_ENCODER));
 }
 
 }  // namespace testing
diff --git a/frc971/zeroing/pot_and_absolute_encoder.cc b/frc971/zeroing/pot_and_absolute_encoder.cc
index 200f399..32c4f60 100644
--- a/frc971/zeroing/pot_and_absolute_encoder.cc
+++ b/frc971/zeroing/pot_and_absolute_encoder.cc
@@ -3,9 +3,9 @@
 #include <cmath>
 #include <numeric>
 
-#include "glog/logging.h"
-
+#include "aos/containers/error_list.h"
 #include "frc971/zeroing/wrap.h"
+#include "glog/logging.h"
 
 namespace frc971 {
 namespace zeroing {
@@ -57,11 +57,13 @@
   if (::std::isnan(info.absolute_encoder())) {
     if (zeroed_) {
       VLOG(1) << "NAN on absolute encoder.";
+      errors_.Set(ZeroingError::LOST_ABSOLUTE_ENCODER);
       error_ = true;
     } else {
       ++nan_samples_;
-      VLOG(1) << "NAN on absolute encoder while zeroing" << nan_samples_;
+      VLOG(1) << "NAN on absolute encoder while zeroing " << nan_samples_;
       if (nan_samples_ >= constants_.average_filter_size) {
+        errors_.Set(ZeroingError::LOST_ABSOLUTE_ENCODER);
         error_ = true;
         zeroed_ = true;
       }
@@ -168,6 +170,7 @@
                 << ", current " << offset_ << ", allowable change: "
                 << constants_.allowable_encoder_error *
                        constants_.one_revolution_distance;
+        errors_.Set(ZeroingError::OFFSET_MOVED_TOO_FAR);
         error_ = true;
       }
 
@@ -183,12 +186,16 @@
 flatbuffers::Offset<PotAndAbsoluteEncoderZeroingEstimator::State>
 PotAndAbsoluteEncoderZeroingEstimator::GetEstimatorState(
     flatbuffers::FlatBufferBuilder *fbb) const {
+  flatbuffers::Offset<flatbuffers::Vector<ZeroingError>> errors_offset =
+      errors_.ToFlatbuffer(fbb);
+
   State::Builder builder(*fbb);
   builder.add_error(error_);
   builder.add_zeroed(zeroed_);
   builder.add_position(position_);
   builder.add_pot_position(filtered_position_);
   builder.add_absolute_position(filtered_absolute_encoder_);
+  builder.add_errors(errors_offset);
   return builder.Finish();
 }
 
diff --git a/frc971/zeroing/pot_and_absolute_encoder.h b/frc971/zeroing/pot_and_absolute_encoder.h
index 133eaf5..2ff141f 100644
--- a/frc971/zeroing/pot_and_absolute_encoder.h
+++ b/frc971/zeroing/pot_and_absolute_encoder.h
@@ -3,8 +3,8 @@
 
 #include <vector>
 
+#include "aos/containers/error_list.h"
 #include "flatbuffers/flatbuffers.h"
-
 #include "frc971/zeroing/zeroing.h"
 
 namespace frc971 {
@@ -92,6 +92,9 @@
   double filtered_position_ = 0.0;
   // The filtered position.
   double position_ = 0.0;
+
+  // Marker to track what kind of error has occurred.
+  aos::ErrorList<ZeroingError> errors_;
 };
 
 }  // namespace zeroing
diff --git a/frc971/zeroing/pot_and_absolute_encoder_test.cc b/frc971/zeroing/pot_and_absolute_encoder_test.cc
index ba89834..1784fed 100644
--- a/frc971/zeroing/pot_and_absolute_encoder_test.cc
+++ b/frc971/zeroing/pot_and_absolute_encoder_test.cc
@@ -1,8 +1,8 @@
 #include "frc971/zeroing/pot_and_absolute_encoder.h"
 
-#include "gtest/gtest.h"
-
 #include "frc971/zeroing/zeroing_test.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
 
 namespace frc971 {
 namespace zeroing {
@@ -16,7 +16,7 @@
               PotAndAbsoluteEncoderZeroingEstimator *estimator,
               double new_position) {
     simulator->MoveTo(new_position);
-    FBB fbb;
+    flatbuffers::FlatBufferBuilder fbb;
     estimator->UpdateEstimate(
         *simulator->FillSensorValues<PotAndAbsolutePosition>(&fbb));
   }
@@ -70,7 +70,7 @@
   PotAndAbsoluteEncoderZeroingEstimator estimator(constants);
 
   // We tolerate a couple NANs before we start.
-  FBB fbb;
+  flatbuffers::FlatBufferBuilder fbb;
   fbb.Finish(CreatePotAndAbsolutePosition(
       fbb, 0.0, ::std::numeric_limits<double>::quiet_NaN(), 0.0));
   for (size_t i = 0; i < kSampleSize - 1; ++i) {
@@ -124,7 +124,7 @@
 
   PotAndAbsoluteEncoderZeroingEstimator estimator(constants);
 
-  FBB fbb;
+  flatbuffers::FlatBufferBuilder fbb;
   fbb.Finish(CreatePotAndAbsolutePosition(
       fbb, 0.0, ::std::numeric_limits<double>::quiet_NaN(), 0.0));
   const auto sensor_values =
@@ -136,6 +136,16 @@
 
   estimator.UpdateEstimate(*sensor_values);
   ASSERT_TRUE(estimator.error());
+
+  flatbuffers::FlatBufferBuilder fbb2;
+  fbb2.Finish(estimator.GetEstimatorState(&fbb2));
+
+  const PotAndAbsoluteEncoderEstimatorState *state =
+      flatbuffers::GetRoot<PotAndAbsoluteEncoderEstimatorState>(
+          fbb2.GetBufferPointer());
+
+  EXPECT_THAT(*state->errors(),
+              ::testing::ElementsAre(ZeroingError::LOST_ABSOLUTE_ENCODER));
 }
 
 }  // namespace testing
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 01c9f01..10c6288 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -72,12 +72,6 @@
       '@angular/forms': 15.1.5
       '@org_frc971/scouting/www/counter_button': link:../counter_button
 
-  scouting/www/import_match_list:
-    specifiers:
-      '@angular/forms': 15.1.5
-    dependencies:
-      '@angular/forms': 15.1.5
-
   scouting/www/match_list:
     specifiers:
       '@angular/forms': 15.1.5
diff --git a/scouting/deploy/README.md b/scouting/deploy/README.md
index 6d223da..2c6ebdb 100644
--- a/scouting/deploy/README.md
+++ b/scouting/deploy/README.md
@@ -35,3 +35,10 @@
 $ sudo systemctl start scouting.service
 $ sudo systemctl restart scouting.service
 ```
+
+Incompatible database changes
+--------------------------------------------------------------------------------
+When deploying a new scouting application that has incompatible changes, you
+may want to clear the existing database. This can be done by also specifying
+the `--clear-db` option when deploying. This option will cause all tables to be
+dropped before the scouting app is deployed.
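+
+For example (the deploy target shown below is illustrative; use whatever
+deploy command the sections above describe, with `--clear-db` appended):
+
+```
+# Illustrative target name; substitute your usual deploy invocation.
+$ bazel run //scouting/deploy:deploy -- --clear-db --host=scouting.frc971.org
+```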
diff --git a/scouting/deploy/deploy.py b/scouting/deploy/deploy.py
index 5967956..2f14f07 100644
--- a/scouting/deploy/deploy.py
+++ b/scouting/deploy/deploy.py
@@ -19,9 +19,38 @@
         default="scouting.frc971.org",
         help="The SSH host to install the scouting web server to.",
     )
+    parser.add_argument(
+        "--clear-db",
+        action="store_true",
+        help=("If set, will stop the existing scouting server and clear the "
+              "database before deploying the new one."),
+    )
     args = parser.parse_args(argv[1:])
     deb = Path(args.deb)
 
+    if args.clear_db:
+        print("Stopping the scouting app.")
+        subprocess.run(
+            f"ssh -tt {args.host} sudo systemctl stop scouting.service",
+            shell=True,
+            # In case the scouting app isn't installed, ignore the error here.
+            check=False,
+            stdin=sys.stdin)
+        print("Clearing the database.")
+        subprocess.run(
+            " ".join([
+                f"ssh -tt {args.host} sudo -u postgres psql",
+                # Drop all tables in the same schema.
+                "-c 'drop schema public cascade;'",
+                # Create an empty schema for the scouting app to use.
+                "-c 'create schema public;'",
+                # List all tables as a sanity check.
+                r"-c '\dt'",
+                "postgres",
+            ]),
+            shell=True,
+            check=True,
+            stdin=sys.stdin)
+
     # Copy the .deb to the scouting server, install it, and delete it again.
     subprocess.run(["rsync", "-L", args.deb, f"{args.host}:/tmp/{deb.name}"],
                    check=True,
diff --git a/scouting/scouting_test.cy.js b/scouting/scouting_test.cy.js
index a2dfa83..105cf43 100644
--- a/scouting/scouting_test.cy.js
+++ b/scouting/scouting_test.cy.js
@@ -62,17 +62,6 @@
   cy.visit('/');
   disableAlerts();
   cy.title().should('eq', 'FRC971 Scouting Application');
-
-  // Import the match list before running any tests. Ideally this should be
-  // run in beforeEach(), but it's not worth doing that at this time. Our
-  // tests are basic enough not to require this.
-  switchToTab('Import Match List');
-  headerShouldBe('Import Match List');
-  setInputTo('#year', '2016');
-  setInputTo('#event_code', 'nytr');
-  clickButton('Import');
-
-  cy.get('.progress_message').contains('Successfully imported match list.');
 });
 
 beforeEach(() => {
diff --git a/scouting/scraping/background/BUILD b/scouting/scraping/background/BUILD
new file mode 100644
index 0000000..9aa92c9
--- /dev/null
+++ b/scouting/scraping/background/BUILD
@@ -0,0 +1,9 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "background",
+    srcs = ["background.go"],
+    importpath = "github.com/frc971/971-Robot-Code/scouting/scraping/background",
+    target_compatible_with = ["@platforms//cpu:x86_64"],
+    visibility = ["//visibility:public"],
+)
diff --git a/scouting/scraping/background/background.go b/scouting/scraping/background/background.go
new file mode 100644
index 0000000..5af8c3e
--- /dev/null
+++ b/scouting/scraping/background/background.go
@@ -0,0 +1,52 @@
+package background
+
+import (
+	"time"
+)
+
+// A helper to run a function in the background every ~10 minutes. Technically
+// can be used for a lot of different things, but is primarily geared towards
+// scraping thebluealliance.com.
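+//
+// Typical usage:
+//
+//	scraper := BackgroundScraper{}
+//	scraper.Start(func() {
+//		// Runs immediately, then roughly every 10 minutes.
+//	})
+//	defer scraper.Stop()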
+type BackgroundScraper struct {
+	doneChan     chan<- bool
+	checkStopped chan<- bool
+}
+
+func (scraper *BackgroundScraper) Start(scrape func()) {
+	scraper.doneChan = make(chan bool, 1)
+	scraper.checkStopped = make(chan bool, 1)
+
+	go func() {
+		// Back-date the start time by 11 minutes so the scrape function runs
+		// immediately the first time through the loop.
+		startTime := time.Now().Add(-11 * time.Minute)
+		for {
+			curTime := time.Now()
+			diff := curTime.Sub(startTime)
+
+			if diff.Minutes() > 10 {
+				scrape()
+				startTime = curTime
+			}
+
+			if len(scraper.doneChan) != 0 {
+				break
+			}
+
+			time.Sleep(time.Second)
+		}
+
+		scraper.checkStopped <- true
+	}()
+}
+
+func (scraper *BackgroundScraper) Stop() {
+	scraper.doneChan <- true
+
+	for {
+		if len(scraper.checkStopped) != 0 {
+			close(scraper.doneChan)
+			close(scraper.checkStopped)
+			break
+		}
+	}
+}
diff --git a/scouting/scraping/scrape.go b/scouting/scraping/scrape.go
index 625157a..b905465 100644
--- a/scouting/scraping/scrape.go
+++ b/scouting/scraping/scrape.go
@@ -89,36 +89,17 @@
 	return bodyBytes, nil
 }
 
-// Return all matches in event according to TBA
-func AllMatches(year int32, eventCode, configPath string) ([]Match, error) {
-	bodyBytes, err := getJson(year, eventCode, configPath, "matches")
-
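+// GetAllData fetches the given category of data ("matches" or "rankings") for
+// an event from The Blue Alliance and unmarshals it into T, e.g.
+//
+//	matches, err := GetAllData[[]Match](year, eventCode, configPath, "matches")
+//	rankings, err := GetAllData[EventRanking](year, eventCode, configPath, "rankings")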
+func GetAllData[T interface{}](year int32, eventCode, configPath string, category string) (T, error) {
+	var result T
+	bodyBytes, err := getJson(year, eventCode, configPath, category)
 	if err != nil {
-		return nil, err
+		return result, err
 	}
 
-	var matches []Match
-	// Unmarshal json into go usable format.
-	if err := json.Unmarshal([]byte(bodyBytes), &matches); err != nil {
-		return nil, errors.New(fmt.Sprint("Failed to parse JSON received from TBA: ", err))
+	// Unmarshal the JSON data into the in-memory format.
+	if err = json.Unmarshal([]byte(bodyBytes), &result); err != nil {
+		return result, errors.New(fmt.Sprint("Failed to parse ", category, " JSON received from TBA: ", err))
 	}
 
-	return matches, nil
-}
-
-// Return event rankings according to TBA
-func AllRankings(year int32, eventCode, configPath string) (EventRanking, error) {
-	bodyBytes, err := getJson(year, eventCode, configPath, "rankings")
-
-	if err != nil {
-		return EventRanking{}, err
-	}
-
-	var rankings EventRanking
-	// Unmarshal json into go usable format.
-	if err := json.Unmarshal([]byte(bodyBytes), &rankings); err != nil {
-		return EventRanking{}, errors.New(fmt.Sprint("Failed to parse JSON received from TBA: ", err))
-	}
-
-	return rankings, nil
+	return result, nil
 }
diff --git a/scouting/scraping/scraping_demo.go b/scouting/scraping/scraping_demo.go
index 69cdbff..0c58612 100644
--- a/scouting/scraping/scraping_demo.go
+++ b/scouting/scraping/scraping_demo.go
@@ -11,6 +11,25 @@
 	"github.com/frc971/971-Robot-Code/scouting/scraping"
 )
 
+func dumpData[T interface{}](jsonPtr *bool, category string) {
+	// Get all the data.
+	data, err := scraping.GetAllData[T](2016, "nytr", "", category)
+	if err != nil {
+		log.Fatal("Failed to scrape ", category, " data: ", err)
+	}
+
+	// Dump the data.
+	if *jsonPtr {
+		jsonData, err := json.MarshalIndent(data, "", "  ")
+		if err != nil {
+			log.Fatal("Failed to turn ", category, " data into JSON: ", err)
+		}
+		fmt.Println(string(jsonData))
+	} else {
+		spew.Dump(data)
+	}
+}
+
 func main() {
 	jsonPtr := flag.Bool("json", false, "If set, dump as JSON, rather than Go debug output.")
 	demoCategory := flag.String("category", "matches", "Decide whether to demo matches or rankings.")
@@ -18,38 +37,8 @@
 	flag.Parse()
 
 	if *demoCategory == "rankings" {
-		// Get all the rankings.
-		rankings, err := scraping.AllRankings(2016, "nytr", "")
-		if err != nil {
-			log.Fatal("Failed to scrape ranking list: ", err)
-		}
-
-		// Dump the rankings.
-		if *jsonPtr {
-			jsonData, err := json.MarshalIndent(rankings, "", "  ")
-			if err != nil {
-				log.Fatal("Failed to turn ranking list into JSON: ", err)
-			}
-			fmt.Println(string(jsonData))
-		} else {
-			spew.Dump(rankings)
-		}
+		dumpData[scraping.EventRanking](jsonPtr, "rankings")
 	} else if *demoCategory == "matches" {
-		// Get all the matches.
-		matches, err := scraping.AllMatches(2016, "nytr", "")
-		if err != nil {
-			log.Fatal("Failed to scrape match list: ", err)
-		}
-
-		// Dump the matches.
-		if *jsonPtr {
-			jsonData, err := json.MarshalIndent(matches, "", "  ")
-			if err != nil {
-				log.Fatal("Failed to turn match list into JSON: ", err)
-			}
-			fmt.Println(string(jsonData))
-		} else {
-			spew.Dump(matches)
-		}
+		dumpData[[]scraping.Match](jsonPtr, "matches")
 	}
 }
diff --git a/scouting/testing/scouting_test_servers.py b/scouting/testing/scouting_test_servers.py
index d1e4e32..40412c0 100644
--- a/scouting/testing/scouting_test_servers.py
+++ b/scouting/testing/scouting_test_servers.py
@@ -47,14 +47,15 @@
     return config
 
 
-def create_tba_config(tmpdir: Path) -> Path:
-    # Configure the scouting webserver to scrape data from our fake TBA
-    # server.
+def create_tba_config(tmpdir: Path, year: int, event_code: str) -> Path:
+    # Configure the scouting webserver to scrape data from our fake TBA server.
     config = tmpdir / "scouting_config.json"
     config.write_text(
         json.dumps({
             "api_key": "dummy_key_that_is_not_actually_used_in_this_test",
             "base_url": "http://localhost:7000",
+            "year": year,
+            "event_code": event_code,
         }))
     return config
 
@@ -72,7 +73,11 @@
 class Runner:
     """Helps manage the services we need for testing the scouting app."""
 
-    def start(self, port: int, notify_fd: int = 0):
+    def start(self,
+              port: int,
+              notify_fd: int = 0,
+              year: int = 2016,
+              event_code: str = "nytr"):
         """Starts the services needed for testing the scouting app.
 
         if notify_fd is set to a non-zero value, the string "READY" is written
@@ -83,7 +88,9 @@
         self.tmpdir.mkdir(exist_ok=True)
 
         db_config = create_db_config(self.tmpdir)
-        tba_config = create_tba_config(self.tmpdir)
+        tba_config = create_tba_config(self.tmpdir,
+                                       year=year,
+                                       event_code=event_code)
 
         # The database needs to be running and addressable before the scouting
         # webserver can start.
@@ -102,8 +109,7 @@
         ])
 
         # Create a fake TBA server to serve the static match list.
-        set_up_tba_api_dir(self.tmpdir, year=2016, event_code="nytr")
-        set_up_tba_api_dir(self.tmpdir, year=2020, event_code="fake")
+        set_up_tba_api_dir(self.tmpdir, year, event_code)
         self.fake_tba_api = subprocess.Popen(
             ["python3", "-m", "http.server", "7000"],
             cwd=self.tmpdir,
diff --git a/scouting/webserver/BUILD b/scouting/webserver/BUILD
index 3df423e..934aff1 100644
--- a/scouting/webserver/BUILD
+++ b/scouting/webserver/BUILD
@@ -8,7 +8,8 @@
     visibility = ["//visibility:private"],
     deps = [
         "//scouting/db",
-        "//scouting/scraping",
+        "//scouting/scraping/background",
+        "//scouting/webserver/match_list",
         "//scouting/webserver/rankings",
         "//scouting/webserver/requests",
         "//scouting/webserver/server",
diff --git a/scouting/webserver/main.go b/scouting/webserver/main.go
index d2fbdfe..1811b7f 100644
--- a/scouting/webserver/main.go
+++ b/scouting/webserver/main.go
@@ -14,7 +14,8 @@
 	"time"
 
 	"github.com/frc971/971-Robot-Code/scouting/db"
-	"github.com/frc971/971-Robot-Code/scouting/scraping"
+	"github.com/frc971/971-Robot-Code/scouting/scraping/background"
+	"github.com/frc971/971-Robot-Code/scouting/webserver/match_list"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/rankings"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
@@ -119,21 +120,24 @@
 	}
 	defer database.Delete()
 
-	scrapeMatchList := func(year int32, eventCode string) ([]scraping.Match, error) {
-		if *blueAllianceConfigPtr == "" {
-			return nil, errors.New("Cannot scrape TBA's match list without a config file.")
-		}
-		return scraping.AllMatches(year, eventCode, *blueAllianceConfigPtr)
-	}
-
 	scoutingServer := server.NewScoutingServer()
 	static.ServePages(scoutingServer, *dirPtr)
-	requests.HandleRequests(database, scrapeMatchList, scoutingServer)
+	requests.HandleRequests(database, scoutingServer)
 	scoutingServer.Start(*portPtr)
 	fmt.Println("Serving", *dirPtr, "on port", *portPtr)
 
-	scraper := rankings.RankingScraper{}
-	scraper.Start(database, 0, "", *blueAllianceConfigPtr)
+	// Since Go doesn't support default arguments, we use 0 and "" to
+	// indicate that we want to source the values from the config.
+
+	matchListScraper := background.BackgroundScraper{}
+	matchListScraper.Start(func() {
+		match_list.GetMatchList(database, 0, "", *blueAllianceConfigPtr)
+	})
+
+	rankingsScraper := background.BackgroundScraper{}
+	rankingsScraper.Start(func() {
+		rankings.GetRankings(database, 0, "", *blueAllianceConfigPtr)
+	})
 
 	// Block until the user hits Ctrl-C.
 	sigint := make(chan os.Signal, 1)
@@ -144,6 +148,7 @@
 
 	fmt.Println("Shutting down.")
 	scoutingServer.Stop()
-	scraper.Stop()
+	rankingsScraper.Stop()
+	matchListScraper.Stop()
 	fmt.Println("Successfully shut down.")
 }
diff --git a/scouting/webserver/match_list/BUILD b/scouting/webserver/match_list/BUILD
new file mode 100644
index 0000000..e32b93c
--- /dev/null
+++ b/scouting/webserver/match_list/BUILD
@@ -0,0 +1,12 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "match_list",
+    srcs = ["match_list.go"],
+    importpath = "github.com/frc971/971-Robot-Code/scouting/webserver/match_list",
+    visibility = ["//visibility:public"],
+    deps = [
+        "//scouting/db",
+        "//scouting/scraping",
+    ],
+)
diff --git a/scouting/webserver/match_list/match_list.go b/scouting/webserver/match_list/match_list.go
new file mode 100644
index 0000000..9029438
--- /dev/null
+++ b/scouting/webserver/match_list/match_list.go
@@ -0,0 +1,138 @@
+package match_list
+
+import (
+	"errors"
+	"fmt"
+	"github.com/frc971/971-Robot-Code/scouting/db"
+	"github.com/frc971/971-Robot-Code/scouting/scraping"
+	"log"
+	"strconv"
+	"strings"
+)
+
+type Database interface {
+	AddToMatch(db.TeamMatch) error
+}
+
+func parseTeamKey(teamKey string) (int, error) {
+	// TBA prefixes teams with "frc". Not sure why. Get rid of that.
+	teamKey = strings.TrimPrefix(teamKey, "frc")
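+	// A team key may also carry a letter suffix (e.g. "frc971B"). Fold the
+	// suffix into a leading digit so the result is still a unique integer
+	// (e.g. "frc971B" becomes 9971).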
+	magnitude := 0
+	if strings.HasSuffix(teamKey, "A") {
+		magnitude = 0
+		teamKey = strings.TrimSuffix(teamKey, "A")
+	} else if strings.HasSuffix(teamKey, "B") {
+		magnitude = 9
+		teamKey = strings.TrimSuffix(teamKey, "B")
+	} else if strings.HasSuffix(teamKey, "C") {
+		magnitude = 8
+		teamKey = strings.TrimSuffix(teamKey, "C")
+	} else if strings.HasSuffix(teamKey, "D") {
+		magnitude = 7
+		teamKey = strings.TrimSuffix(teamKey, "D")
+	} else if strings.HasSuffix(teamKey, "E") {
+		magnitude = 6
+		teamKey = strings.TrimSuffix(teamKey, "E")
+	} else if strings.HasSuffix(teamKey, "F") {
+		magnitude = 5
+		teamKey = strings.TrimSuffix(teamKey, "F")
+	}
+
+	if magnitude != 0 {
+		teamKey = strconv.Itoa(magnitude) + teamKey
+	}
+
+	result, err := strconv.Atoi(teamKey)
+	return result, err
+}
+
+// Parses the alliance data from the specified match and returns the three red
+// teams and the three blue teams.
+func parseTeamKeys(match *scraping.Match) ([3]int32, [3]int32, error) {
+	redKeys := match.Alliances.Red.TeamKeys
+	blueKeys := match.Alliances.Blue.TeamKeys
+
+	if len(redKeys) != 3 || len(blueKeys) != 3 {
+		return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
+			"Found %d red teams and %d blue teams.", len(redKeys), len(blueKeys)))
+	}
+
+	var red [3]int32
+	for i, key := range redKeys {
+		team, err := parseTeamKey(key)
+		if err != nil {
+			return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
+				"Failed to parse red %d team '%s' as integer: %v", i+1, key, err))
+		}
+		red[i] = int32(team)
+	}
+	var blue [3]int32
+	for i, key := range blueKeys {
+		team, err := parseTeamKey(key)
+		if err != nil {
+			return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
+				"Failed to parse blue %d team '%s' as integer: %v", i+1, key, err))
+		}
+		blue[i] = int32(team)
+	}
+	return red, blue, nil
+}
+
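+// GetMatchList scrapes the match list for the specified event from The Blue
+// Alliance and adds an entry for every team in every match to the database.
+// Failures are logged and otherwise ignored so the background scraper can
+// retry on its next cycle.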
+func GetMatchList(database Database, year int32, eventCode string, blueAllianceConfig string) {
+	matches, err := scraping.GetAllData[[]scraping.Match](year, eventCode, blueAllianceConfig, "matches")
+	if err != nil {
+		log.Println("Failed to scrape match list: ", err)
+		return
+	}
+
+	for _, match := range matches {
+		// Make sure the data is valid.
+		red, blue, err := parseTeamKeys(&match)
+		if err != nil {
+			log.Printf("TheBlueAlliance data for match %d is malformed: %v", match.MatchNumber, err)
+			return
+		}
+
+		team_matches := []db.TeamMatch{
+			{
+				MatchNumber: int32(match.MatchNumber),
+				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
+				Alliance: "R", AlliancePosition: 1, TeamNumber: red[0],
+			},
+			{
+				MatchNumber: int32(match.MatchNumber),
+				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
+				Alliance: "R", AlliancePosition: 2, TeamNumber: red[1],
+			},
+			{
+				MatchNumber: int32(match.MatchNumber),
+				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
+				Alliance: "R", AlliancePosition: 3, TeamNumber: red[2],
+			},
+			{
+				MatchNumber: int32(match.MatchNumber),
+				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
+				Alliance: "B", AlliancePosition: 1, TeamNumber: blue[0],
+			},
+			{
+				MatchNumber: int32(match.MatchNumber),
+				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
+				Alliance: "B", AlliancePosition: 2, TeamNumber: blue[1],
+			},
+			{
+				MatchNumber: int32(match.MatchNumber),
+				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
+				Alliance: "B", AlliancePosition: 3, TeamNumber: blue[2],
+			},
+		}
+
+		for _, match := range team_matches {
+			// Add each team's entry for this match to the database.
+			err = database.AddToMatch(match)
+			if err != nil {
+				log.Printf("Failed to add team %d from match %d to the database: %v", match.TeamNumber, match.MatchNumber, err)
+				return
+			}
+		}
+	}
+}
diff --git a/scouting/webserver/rankings/BUILD b/scouting/webserver/rankings/BUILD
index c74f88f..4696d26 100644
--- a/scouting/webserver/rankings/BUILD
+++ b/scouting/webserver/rankings/BUILD
@@ -21,6 +21,7 @@
     embed = [":rankings"],
     deps = [
         "//scouting/db",
+        "//scouting/scraping/background",
         "//scouting/webserver/server",
     ],
 )
diff --git a/scouting/webserver/rankings/rankings.go b/scouting/webserver/rankings/rankings.go
index 6e63c0a..0d20b54 100644
--- a/scouting/webserver/rankings/rankings.go
+++ b/scouting/webserver/rankings/rankings.go
@@ -6,14 +6,8 @@
 	"log"
 	"strconv"
 	"strings"
-	"time"
 )
 
-type RankingScraper struct {
-	doneChan     chan<- bool
-	checkStopped chan<- bool
-}
-
 type Database interface {
 	AddOrUpdateRankings(db.Ranking) error
 }
@@ -24,8 +18,8 @@
 	return strconv.Atoi(teamKey)
 }
 
-func getRankings(database Database, year int32, eventCode string, blueAllianceConfig string) {
-	rankings, err := scraping.AllRankings(year, eventCode, blueAllianceConfig)
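+// GetRankings scrapes the event rankings from The Blue Alliance and stores
+// them in the database. Failures are logged and otherwise ignored so the
+// background scraper can retry on its next cycle.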
+func GetRankings(database Database, year int32, eventCode string, blueAllianceConfig string) {
+	rankings, err := scraping.GetAllData[scraping.EventRanking](year, eventCode, blueAllianceConfig, "rankings")
 	if err != nil {
 		log.Println("Failed to scrape ranking list: ", err)
 		return
@@ -51,42 +45,3 @@
 		}
 	}
 }
-
-func (scraper *RankingScraper) Start(database Database, year int32, eventCode string, blueAllianceConfig string) {
-	scraper.doneChan = make(chan bool, 1)
-	scraper.checkStopped = make(chan bool, 1)
-
-	go func(database Database, year int32, eventCode string) {
-		// Setting start time to 11 minutes prior so getRankings called instantly when Start() called
-		startTime := time.Now().Add(-11 * time.Minute)
-		for {
-			curTime := time.Now()
-			diff := curTime.Sub(startTime)
-
-			if diff.Minutes() > 10 {
-				getRankings(database, year, eventCode, blueAllianceConfig)
-				startTime = curTime
-			}
-
-			if len(scraper.doneChan) != 0 {
-				break
-			}
-
-			time.Sleep(time.Second)
-		}
-
-		scraper.checkStopped <- true
-	}(database, year, eventCode)
-}
-
-func (scraper *RankingScraper) Stop() {
-	scraper.doneChan <- true
-
-	for {
-		if len(scraper.checkStopped) != 0 {
-			close(scraper.doneChan)
-			close(scraper.checkStopped)
-			break
-		}
-	}
-}
diff --git a/scouting/webserver/rankings/rankings_test.go b/scouting/webserver/rankings/rankings_test.go
index aa23c76..6f8af3b 100644
--- a/scouting/webserver/rankings/rankings_test.go
+++ b/scouting/webserver/rankings/rankings_test.go
@@ -2,9 +2,11 @@
 
 import (
 	"github.com/frc971/971-Robot-Code/scouting/db"
+	"github.com/frc971/971-Robot-Code/scouting/scraping/background"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
 	"net/http"
 	"reflect"
+	"strings"
 	"testing"
 	"time"
 )
@@ -18,8 +20,13 @@
 	return nil
 }
 
-func ServeRankings(h http.Handler) http.Handler {
+func ServeRankings(t *testing.T, h http.Handler) http.Handler {
 	fn := func(w http.ResponseWriter, r *http.Request) {
+		// Make sure that the rankings are requested properly.
+		if !strings.HasSuffix(r.URL.Path, "/2016nytr/rankings") {
+			t.Error("Got unexpected URL: ", r.URL.Path)
+		}
+
 		r.URL.Path = "scraping/test_data/2016_nytr_rankings.json"
 
 		h.ServeHTTP(w, r)
@@ -30,13 +37,15 @@
 
 func TestGetRankings(t *testing.T) {
 	database := MockDatabase{}
-	scraper := RankingScraper{}
+	scraper := background.BackgroundScraper{}
 	tbaServer := server.NewScoutingServer()
-	tbaServer.Handle("/", ServeRankings(http.FileServer(http.Dir("../../"))))
+	tbaServer.Handle("/", ServeRankings(t, http.FileServer(http.Dir("../../"))))
 	tbaServer.Start(8000)
 	defer tbaServer.Stop()
 
-	scraper.Start(&database, 0, "", "scouting_test_config.json")
+	scraper.Start(func() {
+		GetRankings(&database, 0, "", "scouting_test_config.json")
+	})
 	defer scraper.Stop()
 
 	for {
diff --git a/scouting/webserver/rankings/scouting_test_config.json b/scouting/webserver/rankings/scouting_test_config.json
index 40a7747..6bc4fec 100644
--- a/scouting/webserver/rankings/scouting_test_config.json
+++ b/scouting/webserver/rankings/scouting_test_config.json
@@ -1,6 +1,6 @@
 {
      "api_key": "dummy_key_that_is_not_actually_used_in_this_test",
      "base_url": "http://localhost:8000",
-     "year": 2022,
-     "event_code": "CMPTX"
-}
\ No newline at end of file
+     "year": 2016,
+     "event_code": "nytr"
+}
diff --git a/scouting/webserver/requests/BUILD b/scouting/webserver/requests/BUILD
index 9eb207d..e0d3e87 100644
--- a/scouting/webserver/requests/BUILD
+++ b/scouting/webserver/requests/BUILD
@@ -8,10 +8,7 @@
     visibility = ["//visibility:public"],
     deps = [
         "//scouting/db",
-        "//scouting/scraping",
         "//scouting/webserver/requests/messages:error_response_go_fbs",
-        "//scouting/webserver/requests/messages:refresh_match_list_go_fbs",
-        "//scouting/webserver/requests/messages:refresh_match_list_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_driver_rankings_go_fbs",
         "//scouting/webserver/requests/messages:request_all_driver_rankings_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_matches_go_fbs",
@@ -46,11 +43,8 @@
     target_compatible_with = ["@platforms//cpu:x86_64"],
     deps = [
         "//scouting/db",
-        "//scouting/scraping",
         "//scouting/webserver/requests/debug",
         "//scouting/webserver/requests/messages:error_response_go_fbs",
-        "//scouting/webserver/requests/messages:refresh_match_list_go_fbs",
-        "//scouting/webserver/requests/messages:refresh_match_list_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_driver_rankings_go_fbs",
         "//scouting/webserver/requests/messages:request_all_driver_rankings_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_matches_go_fbs",
diff --git a/scouting/webserver/requests/debug/BUILD b/scouting/webserver/requests/debug/BUILD
index 650ba82..1473b3c 100644
--- a/scouting/webserver/requests/debug/BUILD
+++ b/scouting/webserver/requests/debug/BUILD
@@ -8,7 +8,6 @@
     visibility = ["//visibility:public"],
     deps = [
         "//scouting/webserver/requests/messages:error_response_go_fbs",
-        "//scouting/webserver/requests/messages:refresh_match_list_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_driver_rankings_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_matches_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_notes_response_go_fbs",
diff --git a/scouting/webserver/requests/debug/cli/cli_test.py b/scouting/webserver/requests/debug/cli/cli_test.py
index 6d33082..4e47b2d 100644
--- a/scouting/webserver/requests/debug/cli/cli_test.py
+++ b/scouting/webserver/requests/debug/cli/cli_test.py
@@ -13,8 +13,19 @@
 from typing import Any, Dict, List
 import unittest
 
+from rules_python.python.runfiles import runfiles
+
 import scouting.testing.scouting_test_servers
 
+RUNFILES = runfiles.Create()
+
+# This regex finds the number of matches that the web server has imported. This
+# is intended to parse the output of the debug cli's `-requestAllMatches`
+# option.
+MATCH_LIST_LENGTH_EXTRACTION_RE = re.compile(
+    r"MatchList: \(\[\]\*request_all_matches_response.MatchT\) \(len=(\d+) cap=.*\) \{"
+)
+
 
 def write_json_request(content: Dict[str, Any]):
     """Writes a JSON file with the specified dict content."""
@@ -37,29 +48,52 @@
     )
 
 
+def find_num_matches_for_event(year, event_code):
+    with open(
+            RUNFILES.Rlocation(
+                f"org_frc971/scouting/scraping/test_data/{year}_{event_code}.json"
+            ), "r") as file:
+        raw_match_list = json.load(file)
+    return len(raw_match_list)
+
+
 class TestDebugCli(unittest.TestCase):
 
     def setUp(self):
         self.servers = scouting.testing.scouting_test_servers.Runner()
-        self.servers.start(8080)
 
     def tearDown(self):
         self.servers.stop()
 
-    def refresh_match_list(self, year=2016, event_code="nytr"):
-        """Triggers the webserver to fetch the match list."""
-        json_path = write_json_request({
-            "year": year,
-            "event_code": event_code,
-        })
-        exit_code, stdout, stderr = run_debug_cli(
-            ["-refreshMatchList", json_path])
-        self.assertEqual(exit_code, 0, f"{year}{event_code}: {stderr}")
-        self.assertIn(
-            "(refresh_match_list_response.RefreshMatchListResponseT)", stdout)
+    def start_servers(self, year=2016, event_code="nytr"):
+        self.servers.start(8080, year=year, event_code=event_code)
+
+        expected_num_matches = find_num_matches_for_event(year, event_code)
+        json_path = write_json_request({})
+
+        # Wait until the match list is imported. This is done automatically
+        # when the web server starts up.
+        sys.stderr.write("Waiting for match list to be imported.\n")
+        while True:
+            exit_code, stdout, stderr = run_debug_cli(
+                ["-requestAllMatches", json_path])
+            self.assertEqual(exit_code, 0, stderr)
+
+            match = MATCH_LIST_LENGTH_EXTRACTION_RE.search(stdout)
+            if match:
+                num_matches_imported = int(match.group(1))
+
+                if num_matches_imported == expected_num_matches:
+                    break
+                else:
+                    sys.stderr.write(
+                        f"Waiting until {expected_num_matches} matches are imported. "
+                        f"Currently at {num_matches_imported}.\n")
+
+            time.sleep(0.25)
 
     def test_submit_and_request_data_scouting(self):
-        self.refresh_match_list(year=2020, event_code="fake")
+        self.start_servers(year=2020, event_code="fake")
 
         # First submit some data to be added to the database.
         json_path = write_json_request({
@@ -142,7 +176,7 @@
             }"""), stdout)
 
     def test_submit_and_request_notes(self):
-        self.refresh_match_list(year=2020, event_code="fake")
+        self.start_servers(year=2020, event_code="fake")
 
         # First submit some data to be added to the database.
         json_path = write_json_request({
@@ -179,7 +213,7 @@
             }"""), stdout)
 
     def test_submit_and_request_driver_ranking(self):
-        self.refresh_match_list(year=2020, event_code="fake")
+        self.start_servers(year=2020, event_code="fake")
 
         # First submit some data to be added to the database.
         json_path = write_json_request({
@@ -209,7 +243,7 @@
             }"""), stdout)
 
     def test_request_all_matches(self):
-        self.refresh_match_list()
+        self.start_servers()
 
         # RequestAllMatches has no fields.
         json_path = write_json_request({})
@@ -222,24 +256,6 @@
             stdout)
         self.assertEqual(stdout.count("MatchNumber:"), 90)
 
-    def test_request_all_matches(self):
-        """Makes sure that we can import the match list multiple times without problems."""
-        request_all_matches_outputs = []
-        for _ in range(2):
-            self.refresh_match_list()
-
-            # RequestAllMatches has no fields.
-            json_path = write_json_request({})
-            exit_code, stdout, stderr = run_debug_cli(
-                ["-requestAllMatches", json_path])
-
-            self.assertEqual(exit_code, 0, stderr)
-            request_all_matches_outputs.append(stdout)
-
-        self.maxDiff = None
-        self.assertEqual(request_all_matches_outputs[0],
-                         request_all_matches_outputs[1])
-
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/scouting/webserver/requests/debug/cli/main.go b/scouting/webserver/requests/debug/cli/main.go
index 066dec5..9279dba 100644
--- a/scouting/webserver/requests/debug/cli/main.go
+++ b/scouting/webserver/requests/debug/cli/main.go
@@ -97,8 +97,6 @@
 		"If specified, parse the file as a requestAllDriverRankings JSON request.")
 	requestAllNotesPtr := flag.String("requestAllNotes", "",
 		"If specified, parse the file as a requestAllNotes JSON request.")
-	refreshMatchListPtr := flag.String("refreshMatchList", "",
-		"If specified, parse the file as a RefreshMatchList JSON request.")
 	flag.Parse()
 
 	spew.Config.Indent = *indentPtr
@@ -155,11 +153,4 @@
 		*requestAllNotesPtr,
 		*addressPtr,
 		debug.RequestAllNotes)
-
-	maybePerformRequest(
-		"RefreshMatchList",
-		"scouting/webserver/requests/messages/refresh_match_list.fbs",
-		*refreshMatchListPtr,
-		*addressPtr,
-		debug.RefreshMatchList)
 }
diff --git a/scouting/webserver/requests/debug/debug.go b/scouting/webserver/requests/debug/debug.go
index 130850c..1f215f7 100644
--- a/scouting/webserver/requests/debug/debug.go
+++ b/scouting/webserver/requests/debug/debug.go
@@ -10,7 +10,6 @@
 	"net/http"
 
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/error_response"
-	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_driver_rankings_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_matches_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_notes_response"
@@ -130,12 +129,6 @@
 		request_data_scouting_response.GetRootAsRequestDataScoutingResponse)
 }
 
-func RefreshMatchList(server string, requestBytes []byte) (*refresh_match_list_response.RefreshMatchListResponseT, error) {
-	return sendMessage[refresh_match_list_response.RefreshMatchListResponseT](
-		server+"/requests/refresh_match_list", requestBytes,
-		refresh_match_list_response.GetRootAsRefreshMatchListResponse)
-}
-
 func SubmitNotes(server string, requestBytes []byte) (*submit_notes_response.SubmitNotesResponseT, error) {
 	return sendMessage[submit_notes_response.SubmitNotesResponseT](
 		server+"/requests/submit/submit_notes", requestBytes,
diff --git a/scouting/webserver/requests/messages/BUILD b/scouting/webserver/requests/messages/BUILD
index 2e118d5..124613c 100644
--- a/scouting/webserver/requests/messages/BUILD
+++ b/scouting/webserver/requests/messages/BUILD
@@ -13,8 +13,6 @@
     "request_all_notes_response",
     "request_data_scouting",
     "request_data_scouting_response",
-    "refresh_match_list",
-    "refresh_match_list_response",
     "submit_notes",
     "submit_notes_response",
     "request_notes_for_team",
diff --git a/scouting/webserver/requests/messages/refresh_match_list.fbs b/scouting/webserver/requests/messages/refresh_match_list.fbs
deleted file mode 100644
index c4384c7..0000000
--- a/scouting/webserver/requests/messages/refresh_match_list.fbs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace scouting.webserver.requests;
-
-table RefreshMatchList {
-    year: int (id: 0);
-    event_code: string (id: 1);
-}
-
-root_type RefreshMatchList;
diff --git a/scouting/webserver/requests/messages/refresh_match_list_response.fbs b/scouting/webserver/requests/messages/refresh_match_list_response.fbs
deleted file mode 100644
index ba80272..0000000
--- a/scouting/webserver/requests/messages/refresh_match_list_response.fbs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace scouting.webserver.requests;
-
-table RefreshMatchListResponse {
-}
-
-root_type RefreshMatchListResponse;
diff --git a/scouting/webserver/requests/requests.go b/scouting/webserver/requests/requests.go
index d56f380..fab725c 100644
--- a/scouting/webserver/requests/requests.go
+++ b/scouting/webserver/requests/requests.go
@@ -12,10 +12,7 @@
 	"strings"
 
 	"github.com/frc971/971-Robot-Code/scouting/db"
-	"github.com/frc971/971-Robot-Code/scouting/scraping"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/error_response"
-	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list"
-	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_driver_rankings"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_driver_rankings_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_matches"
@@ -52,8 +49,6 @@
 type RequestAllNotesResponseT = request_all_notes_response.RequestAllNotesResponseT
 type RequestDataScouting = request_data_scouting.RequestDataScouting
 type RequestDataScoutingResponseT = request_data_scouting_response.RequestDataScoutingResponseT
-type RefreshMatchList = refresh_match_list.RefreshMatchList
-type RefreshMatchListResponseT = refresh_match_list_response.RefreshMatchListResponseT
 type SubmitNotes = submit_notes.SubmitNotes
 type SubmitNotesResponseT = submit_notes_response.SubmitNotesResponseT
 type RequestNotesForTeam = request_notes_for_team.RequestNotesForTeam
@@ -85,8 +80,6 @@
 	AddDriverRanking(db.DriverRankingData) error
 }
 
-type ScrapeMatchList func(int32, string) ([]scraping.Match, error)
-
 // Handles unknown requests. Just returns a 404.
 func unknown(w http.ResponseWriter, req *http.Request) {
 	w.WriteHeader(http.StatusNotFound)
@@ -417,152 +410,6 @@
 	w.Write(builder.FinishedBytes())
 }
 
-func parseTeamKey(teamKey string) (int, error) {
-	// TBA prefixes teams with "frc". Not sure why. Get rid of that.
-	teamKey = strings.TrimPrefix(teamKey, "frc")
-	magnitude := 0
-	if strings.HasSuffix(teamKey, "A") {
-		magnitude = 0
-		teamKey = strings.TrimSuffix(teamKey, "A")
-	} else if strings.HasSuffix(teamKey, "B") {
-		magnitude = 9
-		teamKey = strings.TrimSuffix(teamKey, "B")
-	} else if strings.HasSuffix(teamKey, "C") {
-		magnitude = 8
-		teamKey = strings.TrimSuffix(teamKey, "C")
-	} else if strings.HasSuffix(teamKey, "D") {
-		magnitude = 7
-		teamKey = strings.TrimSuffix(teamKey, "D")
-	} else if strings.HasSuffix(teamKey, "E") {
-		magnitude = 6
-		teamKey = strings.TrimSuffix(teamKey, "E")
-	} else if strings.HasSuffix(teamKey, "F") {
-		magnitude = 5
-		teamKey = strings.TrimSuffix(teamKey, "F")
-	}
-
-	if magnitude != 0 {
-		teamKey = strconv.Itoa(magnitude) + teamKey
-	}
-
-	result, err := strconv.Atoi(teamKey)
-	return result, err
-}
-
-// Parses the alliance data from the specified match and returns the three red
-// teams and the three blue teams.
-func parseTeamKeys(match *scraping.Match) ([3]int32, [3]int32, error) {
-	redKeys := match.Alliances.Red.TeamKeys
-	blueKeys := match.Alliances.Blue.TeamKeys
-
-	if len(redKeys) != 3 || len(blueKeys) != 3 {
-		return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
-			"Found %d red teams and %d blue teams.", len(redKeys), len(blueKeys)))
-	}
-
-	var red [3]int32
-	for i, key := range redKeys {
-		team, err := parseTeamKey(key)
-		if err != nil {
-			return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
-				"Failed to parse red %d team '%s' as integer: %v", i+1, key, err))
-		}
-		red[i] = int32(team)
-	}
-	var blue [3]int32
-	for i, key := range blueKeys {
-		team, err := parseTeamKey(key)
-		if err != nil {
-			return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
-				"Failed to parse blue %d team '%s' as integer: %v", i+1, key, err))
-		}
-		blue[i] = int32(team)
-	}
-	return red, blue, nil
-}
-
-type refreshMatchListHandler struct {
-	db     Database
-	scrape ScrapeMatchList
-}
-
-func (handler refreshMatchListHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
-	requestBytes, err := io.ReadAll(req.Body)
-	if err != nil {
-		respondWithError(w, http.StatusBadRequest, fmt.Sprint("Failed to read request bytes:", err))
-		return
-	}
-
-	request, success := parseRequest(w, requestBytes, "RefreshMatchList", refresh_match_list.GetRootAsRefreshMatchList)
-	if !success {
-		return
-	}
-
-	matches, err := handler.scrape(request.Year(), string(request.EventCode()))
-	if err != nil {
-		respondWithError(w, http.StatusInternalServerError, fmt.Sprint("Failed to scrape match list: ", err))
-		return
-	}
-
-	for _, match := range matches {
-		// Make sure the data is valid.
-		red, blue, err := parseTeamKeys(&match)
-		if err != nil {
-			respondWithError(w, http.StatusInternalServerError, fmt.Sprintf(
-				"TheBlueAlliance data for match %d is malformed: %v", match.MatchNumber, err))
-			return
-		}
-
-		team_matches := []db.TeamMatch{
-			{
-				MatchNumber: int32(match.MatchNumber),
-				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
-				Alliance: "R", AlliancePosition: 1, TeamNumber: red[0],
-			},
-			{
-				MatchNumber: int32(match.MatchNumber),
-				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
-				Alliance: "R", AlliancePosition: 2, TeamNumber: red[1],
-			},
-			{
-				MatchNumber: int32(match.MatchNumber),
-				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
-				Alliance: "R", AlliancePosition: 3, TeamNumber: red[2],
-			},
-			{
-				MatchNumber: int32(match.MatchNumber),
-				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
-				Alliance: "B", AlliancePosition: 1, TeamNumber: blue[0],
-			},
-			{
-				MatchNumber: int32(match.MatchNumber),
-				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
-				Alliance: "B", AlliancePosition: 2, TeamNumber: blue[1],
-			},
-			{
-				MatchNumber: int32(match.MatchNumber),
-				SetNumber:   int32(match.SetNumber), CompLevel: match.CompLevel,
-				Alliance: "B", AlliancePosition: 3, TeamNumber: blue[2],
-			},
-		}
-
-		for _, match := range team_matches {
-			// Iterate through matches to check they can be added to database.
-			err = handler.db.AddToMatch(match)
-			if err != nil {
-				respondWithError(w, http.StatusInternalServerError, fmt.Sprintf(
-					"Failed to add team %d from match %d to the database: %v", match.TeamNumber, match.MatchNumber, err))
-				return
-			}
-		}
-	}
-
-	var response RefreshMatchListResponseT
-	builder := flatbuffers.NewBuilder(1024)
-	builder.Finish((&response).Pack(builder))
-	w.Write(builder.FinishedBytes())
-}
-
 type submitNoteScoutingHandler struct {
 	db Database
 }
@@ -829,14 +676,13 @@
 	w.Write(builder.FinishedBytes())
 }
 
-func HandleRequests(db Database, scrape ScrapeMatchList, scoutingServer server.ScoutingServer) {
+func HandleRequests(db Database, scoutingServer server.ScoutingServer) {
 	scoutingServer.HandleFunc("/requests", unknown)
 	scoutingServer.Handle("/requests/submit/data_scouting", submitDataScoutingHandler{db})
 	scoutingServer.Handle("/requests/request/all_matches", requestAllMatchesHandler{db})
 	scoutingServer.Handle("/requests/request/all_notes", requestAllNotesHandler{db})
 	scoutingServer.Handle("/requests/request/all_driver_rankings", requestAllDriverRankingsHandler{db})
 	scoutingServer.Handle("/requests/request/data_scouting", requestDataScoutingHandler{db})
-	scoutingServer.Handle("/requests/refresh_match_list", refreshMatchListHandler{db, scrape})
 	scoutingServer.Handle("/requests/submit/submit_notes", submitNoteScoutingHandler{db})
 	scoutingServer.Handle("/requests/request/notes_for_team", requestNotesForTeamHandler{db})
 	scoutingServer.Handle("/requests/submit/shift_schedule", submitShiftScheduleHandler{db})
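With the `ScrapeMatchList` callback and the `/requests/refresh_match_list` route gone, `HandleRequests` now takes only the database and the scouting server. A minimal sketch of how a caller wires the handlers up under the new two-argument signature; the package name and helper are hypothetical, and the database is assumed to be anything implementing `requests.Database`:

```go
// Hypothetical wiring-up sketch, for illustration only.
package scoutingsetup

import (
	"github.com/frc971/971-Robot-Code/scouting/webserver/requests"
	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
)

// StartScoutingServer registers every request handler and starts listening.
// No TheBlueAlliance scraping callback is threaded through any more; match
// list refreshes are no longer served by this webserver.
func StartScoutingServer(database requests.Database) server.ScoutingServer {
	scoutingServer := server.NewScoutingServer()
	requests.HandleRequests(database, scoutingServer)
	scoutingServer.Start(8080)
	return scoutingServer
}
```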
diff --git a/scouting/webserver/requests/requests_test.go b/scouting/webserver/requests/requests_test.go
index 27a45e6..ee22022 100644
--- a/scouting/webserver/requests/requests_test.go
+++ b/scouting/webserver/requests/requests_test.go
@@ -8,11 +8,8 @@
 	"testing"
 
 	"github.com/frc971/971-Robot-Code/scouting/db"
-	"github.com/frc971/971-Robot-Code/scouting/scraping"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/debug"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/error_response"
-	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list"
-	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_driver_rankings"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_driver_rankings_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_matches"
@@ -37,7 +34,7 @@
 func Test404(t *testing.T) {
 	db := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&db, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -54,7 +51,7 @@
 func TestSubmitDataScoutingError(t *testing.T) {
 	db := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&db, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -82,7 +79,7 @@
 func TestSubmitDataScouting(t *testing.T) {
 	db := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&db, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -201,7 +198,7 @@
 		},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&db, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -267,7 +264,7 @@
 		},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&db, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -322,7 +319,7 @@
 func TestSubmitNotes(t *testing.T) {
 	database := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&database, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&database, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -375,7 +372,7 @@
 		}},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&database, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&database, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -417,7 +414,7 @@
 		},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&db, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -464,7 +461,7 @@
 func TestSubmitShiftSchedule(t *testing.T) {
 	database := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&database, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&database, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -500,98 +497,10 @@
 	}
 }
 
-// Validates that we can download the schedule from The Blue Alliance.
-func TestRefreshMatchList(t *testing.T) {
-	scrapeMockSchedule := func(int32, string) ([]scraping.Match, error) {
-		return []scraping.Match{
-			{
-				CompLevel:   "qual",
-				MatchNumber: 1,
-				SetNumber:   2,
-				Alliances: scraping.Alliances{
-					Red: scraping.Alliance{
-						TeamKeys: []string{
-							"100",
-							"200",
-							"300",
-						},
-					},
-					Blue: scraping.Alliance{
-						TeamKeys: []string{
-							"101",
-							"201",
-							"301",
-						},
-					},
-				},
-				WinningAlliance: "",
-				EventKey:        "",
-				Time:            0,
-				PredictedTime:   0,
-				ActualTime:      0,
-				PostResultTime:  0,
-				ScoreBreakdowns: scraping.ScoreBreakdowns{},
-			},
-		}, nil
-	}
-
-	database := MockDatabase{}
-	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&database, scrapeMockSchedule, scoutingServer)
-	scoutingServer.Start(8080)
-	defer scoutingServer.Stop()
-
-	builder := flatbuffers.NewBuilder(1024)
-	builder.Finish((&refresh_match_list.RefreshMatchListT{}).Pack(builder))
-
-	response, err := debug.RefreshMatchList("http://localhost:8080", builder.FinishedBytes())
-	if err != nil {
-		t.Fatal("Failed to request all matches: ", err)
-	}
-
-	// Validate the response.
-	expected := refresh_match_list_response.RefreshMatchListResponseT{}
-	if !reflect.DeepEqual(expected, *response) {
-		t.Fatal("Expected ", expected, ", but got ", *response)
-	}
-
-	// Make sure that the data made it into the database.
-	expectedMatches := []db.TeamMatch{
-		{
-			MatchNumber: 1, SetNumber: 2, CompLevel: "qual",
-			Alliance: "R", AlliancePosition: 1, TeamNumber: 100,
-		},
-		{
-			MatchNumber: 1, SetNumber: 2, CompLevel: "qual",
-			Alliance: "R", AlliancePosition: 2, TeamNumber: 200,
-		},
-		{
-			MatchNumber: 1, SetNumber: 2, CompLevel: "qual",
-			Alliance: "R", AlliancePosition: 3, TeamNumber: 300,
-		},
-		{
-			MatchNumber: 1, SetNumber: 2, CompLevel: "qual",
-			Alliance: "B", AlliancePosition: 1, TeamNumber: 101,
-		},
-		{
-			MatchNumber: 1, SetNumber: 2, CompLevel: "qual",
-			Alliance: "B", AlliancePosition: 2, TeamNumber: 201,
-		},
-		{
-			MatchNumber: 1, SetNumber: 2, CompLevel: "qual",
-			Alliance: "B", AlliancePosition: 3, TeamNumber: 301,
-		},
-	}
-
-	if !reflect.DeepEqual(expectedMatches, database.matches) {
-		t.Fatal("Expected ", expectedMatches, ", but got ", database.matches)
-	}
-}
-
 func TestSubmitDriverRanking(t *testing.T) {
 	database := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&database, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&database, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -636,7 +545,7 @@
 		},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&db, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -701,7 +610,7 @@
 		},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scrapeEmtpyMatchList, scoutingServer)
+	HandleRequests(&db, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -820,8 +729,3 @@
 func (database *MockDatabase) ReturnAllDriverRankings() ([]db.DriverRankingData, error) {
 	return database.driver_ranking, nil
 }
-
-// Returns an empty match list from the fake The Blue Alliance scraping.
-func scrapeEmtpyMatchList(int32, string) ([]scraping.Match, error) {
-	return nil, nil
-}
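Every test above now repeats the same two-argument setup. A hypothetical helper (not part of this change) that would capture that boilerplate; `MockDatabase`, `HandleRequests`, and the server package are the ones already used in this test file, while the helper itself is only a sketch:

```go
package requests

import (
	"testing"

	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
)

// startTestServer captures the setup shared by the tests: with the fake
// TheBlueAlliance scraper removed, only the mock database and the scouting
// server need to be constructed.
func startTestServer(t *testing.T, database *MockDatabase) {
	t.Helper()
	scoutingServer := server.NewScoutingServer()
	HandleRequests(database, scoutingServer)
	scoutingServer.Start(8080)
	t.Cleanup(func() { scoutingServer.Stop() })
}
```

Each test body could then shrink to `startTestServer(t, &database)` followed by its request-building and assertion steps.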
diff --git a/scouting/www/BUILD b/scouting/www/BUILD
index cbcecc3..93bcbb2 100644
--- a/scouting/www/BUILD
+++ b/scouting/www/BUILD
@@ -17,7 +17,6 @@
         "//:node_modules/@angular/animations",
         "//scouting/www/driver_ranking:_lib",
         "//scouting/www/entry:_lib",
-        "//scouting/www/import_match_list:_lib",
         "//scouting/www/match_list:_lib",
         "//scouting/www/notes:_lib",
         "//scouting/www/shift_schedule:_lib",
diff --git a/scouting/www/app/app.module.ts b/scouting/www/app/app.module.ts
index ead8b37..7b200d0 100644
--- a/scouting/www/app/app.module.ts
+++ b/scouting/www/app/app.module.ts
@@ -4,7 +4,6 @@
 
 import {App} from './app';
 import {EntryModule} from '../entry';
-import {ImportMatchListModule} from '../import_match_list';
 import {MatchListModule} from '../match_list';
 import {NotesModule} from '../notes';
 import {ShiftScheduleModule} from '../shift_schedule';
@@ -18,7 +17,6 @@
     BrowserAnimationsModule,
     EntryModule,
     NotesModule,
-    ImportMatchListModule,
     MatchListModule,
     ShiftScheduleModule,
     DriverRankingModule,
diff --git a/scouting/www/app/app.ng.html b/scouting/www/app/app.ng.html
index d9dbead..526cd61 100644
--- a/scouting/www/app/app.ng.html
+++ b/scouting/www/app/app.ng.html
@@ -49,15 +49,6 @@
   <li class="nav-item">
     <a
       class="nav-link"
-      [class.active]="tabIs('ImportMatchList')"
-      (click)="switchTabToGuarded('ImportMatchList')"
-    >
-      Import Match List
-    </a>
-  </li>
-  <li class="nav-item">
-    <a
-      class="nav-link"
       [class.active]="tabIs('ShiftSchedule')"
       (click)="switchTabToGuarded('ShiftSchedule')"
     >
@@ -90,9 +81,6 @@
   ></app-entry>
   <frc971-notes *ngSwitchCase="'Notes'"></frc971-notes>
   <app-driver-ranking *ngSwitchCase="'DriverRanking'"></app-driver-ranking>
-  <app-import-match-list
-    *ngSwitchCase="'ImportMatchList'"
-  ></app-import-match-list>
   <shift-schedule *ngSwitchCase="'ShiftSchedule'"></shift-schedule>
   <app-view
     (switchTabsEvent)="switchTabTo($event)"
diff --git a/scouting/www/app/app.ts b/scouting/www/app/app.ts
index 7e81d84..f7d2770 100644
--- a/scouting/www/app/app.ts
+++ b/scouting/www/app/app.ts
@@ -5,12 +5,11 @@
   | 'Notes'
   | 'Entry'
   | 'DriverRanking'
-  | 'ImportMatchList'
   | 'ShiftSchedule'
   | 'View';
 
 // Ignore the guard for tabs that don't require the user to enter any data.
-const unguardedTabs: Tab[] = ['MatchList', 'ImportMatchList'];
+const unguardedTabs: Tab[] = ['MatchList'];
 
 type TeamInMatch = {
   teamNumber: number;
diff --git a/scouting/www/import_match_list/BUILD b/scouting/www/import_match_list/BUILD
deleted file mode 100644
index bc1d5d5..0000000
--- a/scouting/www/import_match_list/BUILD
+++ /dev/null
@@ -1,18 +0,0 @@
-load("@npm//:defs.bzl", "npm_link_all_packages")
-load("//tools/build_rules:js.bzl", "ng_pkg")
-
-npm_link_all_packages(name = "node_modules")
-
-ng_pkg(
-    name = "import_match_list",
-    extra_srcs = [
-        "//scouting/www:app_common_css",
-    ],
-    deps = [
-        ":node_modules/@angular/forms",
-        "//scouting/webserver/requests/messages:error_response_ts_fbs",
-        "//scouting/webserver/requests/messages:refresh_match_list_response_ts_fbs",
-        "//scouting/webserver/requests/messages:refresh_match_list_ts_fbs",
-        "@com_github_google_flatbuffers//ts:flatbuffers_ts",
-    ],
-)
diff --git a/scouting/www/import_match_list/import_match_list.component.css b/scouting/www/import_match_list/import_match_list.component.css
deleted file mode 100644
index e220645..0000000
--- a/scouting/www/import_match_list/import_match_list.component.css
+++ /dev/null
@@ -1,3 +0,0 @@
-* {
-  padding: 10px;
-}
diff --git a/scouting/www/import_match_list/import_match_list.component.ts b/scouting/www/import_match_list/import_match_list.component.ts
deleted file mode 100644
index 526a636..0000000
--- a/scouting/www/import_match_list/import_match_list.component.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-import {Component, OnInit} from '@angular/core';
-
-import {Builder, ByteBuffer} from 'flatbuffers';
-import {ErrorResponse} from '../../webserver/requests/messages/error_response_generated';
-import {RefreshMatchListResponse} from '../../webserver/requests/messages/refresh_match_list_response_generated';
-import {RefreshMatchList} from '../../webserver/requests/messages/refresh_match_list_generated';
-
-@Component({
-  selector: 'app-import-match-list',
-  templateUrl: './import_match_list.ng.html',
-  styleUrls: ['../app/common.css', './import_match_list.component.css'],
-})
-export class ImportMatchListComponent {
-  year: number = new Date().getFullYear();
-  eventCode: string = '';
-  progressMessage: string = '';
-  errorMessage: string = '';
-
-  async importMatchList() {
-    const block_alerts = document.getElementById(
-      'block_alerts'
-    ) as HTMLInputElement;
-    console.log(block_alerts.checked);
-    if (!block_alerts.checked) {
-      if (!window.confirm('Actually import new matches?')) {
-        return;
-      }
-    }
-
-    this.errorMessage = '';
-
-    const builder = new Builder();
-    const eventCode = builder.createString(this.eventCode);
-    RefreshMatchList.startRefreshMatchList(builder);
-    RefreshMatchList.addYear(builder, this.year);
-    RefreshMatchList.addEventCode(builder, eventCode);
-    builder.finish(RefreshMatchList.endRefreshMatchList(builder));
-
-    this.progressMessage = 'Importing match list. Please be patient.';
-
-    const buffer = builder.asUint8Array();
-    const res = await fetch('/requests/refresh_match_list', {
-      method: 'POST',
-      body: buffer,
-    });
-
-    if (res.ok) {
-      // We successfully submitted the data.
-      this.progressMessage = 'Successfully imported match list.';
-    } else {
-      this.progressMessage = '';
-      const resBuffer = await res.arrayBuffer();
-      const fbBuffer = new ByteBuffer(new Uint8Array(resBuffer));
-      const parsedResponse = ErrorResponse.getRootAsErrorResponse(fbBuffer);
-
-      const errorMessage = parsedResponse.errorMessage();
-      this.errorMessage = `Received ${res.status} ${res.statusText}: "${errorMessage}"`;
-    }
-  }
-}
diff --git a/scouting/www/import_match_list/import_match_list.module.ts b/scouting/www/import_match_list/import_match_list.module.ts
deleted file mode 100644
index cdb6fea..0000000
--- a/scouting/www/import_match_list/import_match_list.module.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import {NgModule} from '@angular/core';
-import {CommonModule} from '@angular/common';
-import {ImportMatchListComponent} from './import_match_list.component';
-import {FormsModule} from '@angular/forms';
-
-@NgModule({
-  declarations: [ImportMatchListComponent],
-  exports: [ImportMatchListComponent],
-  imports: [CommonModule, FormsModule],
-})
-export class ImportMatchListModule {}
diff --git a/scouting/www/import_match_list/import_match_list.ng.html b/scouting/www/import_match_list/import_match_list.ng.html
deleted file mode 100644
index fdd7687..0000000
--- a/scouting/www/import_match_list/import_match_list.ng.html
+++ /dev/null
@@ -1,20 +0,0 @@
-<div class="header">
-  <h2>Import Match List</h2>
-</div>
-
-<div class="container-fluid">
-  <div class="row">
-    <label for="year">Year</label>
-    <input [(ngModel)]="year" type="number" id="year" min="1970" max="2500" />
-  </div>
-  <div class="row">
-    <label for="event_code">Event Code</label>
-    <input [(ngModel)]="eventCode" type="text" id="event_code" />
-  </div>
-
-  <span class="progress_message">{{ progressMessage }}</span>
-  <span class="error_message">{{ errorMessage }}</span>
-  <div class="text-right">
-    <button class="btn btn-primary" (click)="importMatchList()">Import</button>
-  </div>
-</div>
diff --git a/scouting/www/import_match_list/package.json b/scouting/www/import_match_list/package.json
deleted file mode 100644
index 05aa790..0000000
--- a/scouting/www/import_match_list/package.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-    "name": "@org_frc971/scouting/www/import_match_list",
-    "private": true,
-    "dependencies": {
-        "@angular/forms": "15.1.5"
-    }
-}
diff --git a/y2023/constants.cc b/y2023/constants.cc
index 8908811..b7b444d 100644
--- a/y2023/constants.cc
+++ b/y2023/constants.cc
@@ -117,11 +117,12 @@
                                          0.0143810684138064;
 
       roll_joint->zeroing.measured_absolute_position = 1.86685853969852;
-      roll_joint->potentiometer_offset =
-          0.624713611895747 + 3.10458504917251 - 0.0966407797407789;
+      roll_joint->potentiometer_offset = 0.624713611895747 + 3.10458504917251 -
+                                         0.0966407797407789 +
+                                         0.0257708772364788;
 
       wrist->subsystem_params.zeroing_constants.measured_absolute_position =
-          -0.607792293122026;
+          0.615125389629717;
 
       break;