scouting: Add an endpoint for populating the match schedule

This patch combines the scraping library with the scouting webserver.
There's now also a new endpoint that the web page (or the debug CLI
tool) can use to ask the server to fetch the match list. The endpoint
is `/requests/refresh_match_list`.
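
As a rough sketch of how a client exercises the new endpoint, using the
`debug` helper added in this patch (the object-API field names `Year`
and `EventCode` are assumed from the standard flatc-generated Go code,
and the server address is just an example):

    package main

    import (
        "log"

        flatbuffers "github.com/google/flatbuffers/go"

        "github.com/frc971/971-Robot-Code/scouting/webserver/requests/debug"
        "github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list"
    )

    func main() {
        // Pack a RefreshMatchList request for the 2016 "nytr" event.
        request := refresh_match_list.RefreshMatchListT{Year: 2016, EventCode: "nytr"}
        builder := flatbuffers.NewBuilder(1024)
        builder.Finish((&request).Pack(builder))

        // POST it to /requests/refresh_match_list on a local scouting
        // webserver and dump the (empty) response.
        response, err := debug.RefreshMatchList(
            "http://localhost:8080", builder.FinishedBytes())
        if err != nil {
            log.Fatal("Failed RefreshMatchList: ", err)
        }
        log.Printf("%+v", *response)
    }

The server then asks the scraping library to fetch that event's matches
from the configured TBA base URL and stores them in the database.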

All the tests are updated. The `cli_test` now serves a 2016 nytr match
list (downloaded from TBA and checked in as test data) from a fake TBA
server. That should make it a decent integration test since it
exercises representative data.

Signed-off-by: Philipp Schrader <philipp.schrader@gmail.com>
Change-Id: I6c540590521b00887eb2ddde2a9369875c659551
diff --git a/scouting/webserver/BUILD b/scouting/webserver/BUILD
index 66db8e8..745852a 100644
--- a/scouting/webserver/BUILD
+++ b/scouting/webserver/BUILD
@@ -8,6 +8,7 @@
     visibility = ["//visibility:private"],
     deps = [
         "//scouting/db",
+        "//scouting/scraping",
         "//scouting/webserver/requests",
         "//scouting/webserver/server",
         "//scouting/webserver/static",
diff --git a/scouting/webserver/main.go b/scouting/webserver/main.go
index 3c699fd..94e0222 100644
--- a/scouting/webserver/main.go
+++ b/scouting/webserver/main.go
@@ -1,6 +1,7 @@
 package main
 
 import (
+	"errors"
 	"flag"
 	"fmt"
 	"io/ioutil"
@@ -11,6 +12,7 @@
 	"syscall"
 
 	"github.com/frc971/971-Robot-Code/scouting/db"
+	"github.com/frc971/971-Robot-Code/scouting/scraping"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/static"
@@ -40,6 +42,10 @@
 	portPtr := flag.Int("port", 8080, "The port number to bind to.")
 	dirPtr := flag.String("directory", ".", "The directory to serve at /.")
 	dbPathPtr := flag.String("database", getDefaultDatabasePath(), "The path to the database.")
+	blueAllianceConfigPtr := flag.String("tba_config", "",
+		"The path to your The Blue Alliance JSON config. "+
+			"It needs an \"api_key\" field with your TBA API key. "+
+			"Optionally, it can have a \"base_url\" field with the TBA API base URL.")
 	flag.Parse()
 
 	database, err := db.NewDatabase(*dbPathPtr)
@@ -47,9 +53,16 @@
 		log.Fatal("Failed to connect to database: ", err)
 	}
 
+	scrapeMatchList := func(year int32, eventCode string) ([]scraping.Match, error) {
+		if *blueAllianceConfigPtr == "" {
+			return nil, errors.New("Cannot scrape TBA's match list without a config file.")
+		}
+		return scraping.AllMatches(year, eventCode, *blueAllianceConfigPtr)
+	}
+
 	scoutingServer := server.NewScoutingServer()
 	static.ServePages(scoutingServer, *dirPtr)
-	requests.HandleRequests(database, scoutingServer)
+	requests.HandleRequests(database, scrapeMatchList, scoutingServer)
 	scoutingServer.Start(*portPtr)
 	fmt.Println("Serving", *dirPtr, "on port", *portPtr)
 
diff --git a/scouting/webserver/requests/BUILD b/scouting/webserver/requests/BUILD
index 196c522..df487f2 100644
--- a/scouting/webserver/requests/BUILD
+++ b/scouting/webserver/requests/BUILD
@@ -8,7 +8,10 @@
     visibility = ["//visibility:public"],
     deps = [
         "//scouting/db",
+        "//scouting/scraping",
         "//scouting/webserver/requests/messages:error_response_go_fbs",
+        "//scouting/webserver/requests/messages:refresh_match_list_go_fbs",
+        "//scouting/webserver/requests/messages:refresh_match_list_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_matches_go_fbs",
         "//scouting/webserver/requests/messages:request_all_matches_response_go_fbs",
         "//scouting/webserver/requests/messages:request_data_scouting_go_fbs",
@@ -29,8 +32,11 @@
     target_compatible_with = ["@platforms//cpu:x86_64"],
     deps = [
         "//scouting/db",
+        "//scouting/scraping",
         "//scouting/webserver/requests/debug",
         "//scouting/webserver/requests/messages:error_response_go_fbs",
+        "//scouting/webserver/requests/messages:refresh_match_list_go_fbs",
+        "//scouting/webserver/requests/messages:refresh_match_list_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_matches_go_fbs",
         "//scouting/webserver/requests/messages:request_all_matches_response_go_fbs",
         "//scouting/webserver/requests/messages:request_data_scouting_go_fbs",
diff --git a/scouting/webserver/requests/debug/BUILD b/scouting/webserver/requests/debug/BUILD
index e3028dc..402503f 100644
--- a/scouting/webserver/requests/debug/BUILD
+++ b/scouting/webserver/requests/debug/BUILD
@@ -8,6 +8,7 @@
     visibility = ["//visibility:public"],
     deps = [
         "//scouting/webserver/requests/messages:error_response_go_fbs",
+        "//scouting/webserver/requests/messages:refresh_match_list_response_go_fbs",
         "//scouting/webserver/requests/messages:request_all_matches_response_go_fbs",
         "//scouting/webserver/requests/messages:request_data_scouting_response_go_fbs",
         "//scouting/webserver/requests/messages:request_matches_for_team_response_go_fbs",
diff --git a/scouting/webserver/requests/debug/cli/BUILD b/scouting/webserver/requests/debug/cli/BUILD
index aba9177..903f8c8 100644
--- a/scouting/webserver/requests/debug/cli/BUILD
+++ b/scouting/webserver/requests/debug/cli/BUILD
@@ -10,7 +10,10 @@
     importpath = "github.com/frc971/971-Robot-Code/scouting/webserver/requests/debug/cli",
     target_compatible_with = ["@platforms//cpu:x86_64"],
     visibility = ["//visibility:private"],
-    deps = ["//scouting/webserver/requests/debug"],
+    deps = [
+        "//scouting/webserver/requests/debug",
+        "@com_github_davecgh_go_spew//spew",
+    ],
 )
 
 go_binary(
@@ -27,6 +30,7 @@
     ],
     data = [
         ":cli",
+        "//scouting/scraping:test_data",
         "//scouting/webserver",
     ],
 )
diff --git a/scouting/webserver/requests/debug/cli/cli_test.py b/scouting/webserver/requests/debug/cli/cli_test.py
index 347c828..a737d59 100644
--- a/scouting/webserver/requests/debug/cli/cli_test.py
+++ b/scouting/webserver/requests/debug/cli/cli_test.py
@@ -2,6 +2,7 @@
 
 import json
 import os
+import re
 from pathlib import Path
 import shutil
 import socket
@@ -10,11 +11,10 @@
 from typing import Any, Dict, List
 import unittest
 
-def write_json(content: Dict[str, Any]):
+def write_json_request(content: Dict[str, Any]):
     """Writes a JSON file with the specified dict content."""
     json_path = Path(os.environ["TEST_TMPDIR"]) / "test.json"
-    with open(json_path, "w") as file:
-        file.write(json.dumps(content))
+    json_path.write_text(json.dumps(content))
     return json_path
 
 def run_debug_cli(args: List[str]):
@@ -30,28 +30,81 @@
         run_result.stderr.decode("utf-8"),
     )
 
+def wait_for_server(port: int):
+    """Waits for the server at the specified port to respond to TCP connections."""
+    while True:
+        try:
+            connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            connection.connect(("localhost", port))
+            connection.close()
+            break
+        except ConnectionRefusedError:
+            connection.close()
+            time.sleep(0.01)
+
+
 class TestDebugCli(unittest.TestCase):
 
     def setUp(self):
-        self.webserver = subprocess.Popen(["scouting/webserver/webserver_/webserver"])
+        tmpdir = Path(os.environ["TEST_TMPDIR"]) / "temp"
+        try:
+            shutil.rmtree(tmpdir)
+        except FileNotFoundError:
+            pass
+        os.mkdir(tmpdir)
 
-        # Wait for the server to respond to requests.
-        while True:
-            try:
-                connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-                connection.connect(("localhost", 8080))
-                connection.close()
-                break
-            except ConnectionRefusedError:
-                connection.close()
-                time.sleep(0.01)
+        # Copy the test data into place so that the final API call can be
+        # emulated.
+        tba_api_dir = tmpdir / "api" / "v3" / "event" / "1234event_key"
+        os.makedirs(tba_api_dir)
+        (tba_api_dir / "matches").write_text(
+            Path("scouting/scraping/test_data/2016_nytr.json").read_text()
+        )
+
+        # Create a fake TBA server to serve the static match list.
+        self.fake_tba_api = subprocess.Popen(
+            ["python3", "-m", "http.server", "7000"],
+            cwd=tmpdir,
+        )
+
+        # Configure the scouting webserver to scrape data from our fake TBA
+        # server.
+        scouting_config = tmpdir / "scouting_config.json"
+        scouting_config.write_text(json.dumps({
+            "api_key": "dummy_key_that_is_not_actually_used_in_this_test",
+            "base_url": "http://localhost:7000",
+        }))
+
+        # Run the scouting webserver.
+        self.webserver = subprocess.Popen([
+            "scouting/webserver/webserver_/webserver",
+            "-port=8080",
+            "-database=%s/database.db" % tmpdir,
+            "-tba_config=%s/scouting_config.json" % tmpdir,
+        ])
+
+        # Wait for the servers to be reachable.
+        wait_for_server(7000)
+        wait_for_server(8080)
 
     def tearDown(self):
+        self.fake_tba_api.terminate()
         self.webserver.terminate()
+        self.fake_tba_api.wait()
         self.webserver.wait()
 
+    def refresh_match_list(self):
+        """Triggers the webserver to fetch the match list."""
+        json_path = write_json_request({
+            "year": 1234,
+            "event_code": "event_key",
+        })
+        exit_code, stdout, stderr = run_debug_cli(["-refreshMatchList", json_path])
+        self.assertEqual(exit_code, 0, stderr)
+        self.assertIn("(refresh_match_list_response.RefreshMatchListResponseT)", stdout)
+
     def test_submit_data_scouting(self):
-        json_path = write_json({
+        json_path = write_json_request({
             "team": 971,
             "match": 42,
             "missed_shots_auto": 9971,
@@ -70,31 +123,37 @@
         self.assertIn("/requests/submit/data_scouting returned 501 Not Implemented", stderr)
 
     def test_request_all_matches(self):
-        # RequestAllMatches has no fields.
-        json_path = write_json({})
-        exit_code, _stdout, stderr = run_debug_cli(["-requestAllMatches", json_path])
+        self.refresh_match_list()
 
-        # TODO(phil): Actually add some matches here.
-        self.assertEqual(exit_code, 0)
-        self.assertIn("{MatchList:[]}", stderr)
+        # RequestAllMatches has no fields.
+        json_path = write_json_request({})
+        exit_code, stdout, stderr = run_debug_cli(["-requestAllMatches", json_path])
+
+        self.assertEqual(exit_code, 0, stderr)
+        self.assertIn("MatchList: ([]*request_all_matches_response.MatchT) (len=90 cap=90) {", stdout)
+        self.assertEqual(stdout.count("MatchNumber:"), 90)
 
     def test_request_matches_for_team(self):
-        json_path = write_json({
-            "team": 971,
-        })
-        exit_code, _stdout, stderr = run_debug_cli(["-requestMatchesForTeam", json_path])
+        self.refresh_match_list()
 
-        # TODO(phil): Actually add some matches here.
-        self.assertEqual(exit_code, 0)
-        self.assertIn("{MatchList:[]}", stderr)
+        json_path = write_json_request({
+            "team": 4856,
+        })
+        exit_code, stdout, stderr = run_debug_cli(["-requestMatchesForTeam", json_path])
+
+        # Team 4856 has 12 matches.
+        self.assertEqual(exit_code, 0, stderr)
+        self.assertIn("MatchList: ([]*request_matches_for_team_response.MatchT) (len=12 cap=12) {", stdout)
+        self.assertEqual(stdout.count("MatchNumber:"), 12)
+        self.assertEqual(len(re.findall(r": \(int32\) 4856[,\n]", stdout)), 12)
 
     def test_request_data_scouting(self):
-        json_path = write_json({})
-        exit_code, _stdout, stderr = run_debug_cli(["-requestDataScouting", json_path])
+        json_path = write_json_request({})
+        exit_code, stdout, stderr = run_debug_cli(["-requestDataScouting", json_path])
 
         # TODO(phil): Actually add data here before querying it.
-        self.assertEqual(exit_code, 0)
-        self.assertIn("{StatsList:[]}", stderr)
+        self.assertEqual(exit_code, 0, stderr)
+        self.assertIn("(request_data_scouting_response.RequestDataScoutingResponseT)", stdout)
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/scouting/webserver/requests/debug/cli/main.go b/scouting/webserver/requests/debug/cli/main.go
index 0782d82..03032be 100644
--- a/scouting/webserver/requests/debug/cli/main.go
+++ b/scouting/webserver/requests/debug/cli/main.go
@@ -11,6 +11,7 @@
 	"os/exec"
 	"path/filepath"
 
+	"github.com/davecgh/go-spew/spew"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/debug"
 )
 
@@ -76,6 +77,8 @@
 		"If specified, parse the file as a RequestMatchesForTeam JSON request.")
 	requestDataScoutingPtr := flag.String("requestDataScouting", "",
 		"If specified, parse the file as a RequestDataScouting JSON request.")
+	refreshMatchListPtr := flag.String("refreshMatchList", "",
+		"If specified, parse the file as a RefreshMatchList JSON request.")
 	flag.Parse()
 
 	// Handle the actual arguments.
@@ -88,7 +91,7 @@
 		if err != nil {
 			log.Fatal("Failed SubmitDataScouting: ", err)
 		}
-		log.Printf("%+v", *response)
+		spew.Dump(*response)
 	}
 	if *requestAllMatchesPtr != "" {
 		log.Printf("Sending RequestAllMatches to %s", *addressPtr)
@@ -99,7 +102,7 @@
 		if err != nil {
 			log.Fatal("Failed RequestAllMatches: ", err)
 		}
-		log.Printf("%+v", *response)
+		spew.Dump(*response)
 	}
 	if *requestMatchesForTeamPtr != "" {
 		log.Printf("Sending RequestMatchesForTeam to %s", *addressPtr)
@@ -110,7 +113,7 @@
 		if err != nil {
 			log.Fatal("Failed RequestMatchesForTeam: ", err)
 		}
-		log.Printf("%+v", *response)
+		spew.Dump(*response)
 	}
 	if *requestDataScoutingPtr != "" {
 		log.Printf("Sending RequestDataScouting to %s", *addressPtr)
@@ -121,6 +124,17 @@
 		if err != nil {
 			log.Fatal("Failed RequestDataScouting: ", err)
 		}
-		log.Printf("%+v", *response)
+		spew.Dump(*response)
+	}
+	if *refreshMatchListPtr != "" {
+		log.Printf("Sending RefreshMatchList to %s", *addressPtr)
+		binaryRequest := parseJson(
+			"scouting/webserver/requests/messages/refresh_match_list.fbs",
+			*refreshMatchListPtr)
+		response, err := debug.RefreshMatchList(*addressPtr, binaryRequest)
+		if err != nil {
+			log.Fatal("Failed RefreshMatchList: ", err)
+		}
+		spew.Dump(*response)
 	}
 }
diff --git a/scouting/webserver/requests/debug/debug.go b/scouting/webserver/requests/debug/debug.go
index 6515e81..81be3d1 100644
--- a/scouting/webserver/requests/debug/debug.go
+++ b/scouting/webserver/requests/debug/debug.go
@@ -9,6 +9,7 @@
 	"net/http"
 
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/error_response"
+	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_matches_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_data_scouting_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_matches_for_team_response"
@@ -20,6 +21,7 @@
 type RequestAllMatchesResponseT = request_all_matches_response.RequestAllMatchesResponseT
 type RequestMatchesForTeamResponseT = request_matches_for_team_response.RequestMatchesForTeamResponseT
 type RequestDataScoutingResponseT = request_data_scouting_response.RequestDataScoutingResponseT
+type RefreshMatchListResponseT = refresh_match_list_response.RefreshMatchListResponseT
 
 // A struct that can be used as an `error`. It contains information about the
 // why the server was unhappy and what the corresponding request was.
@@ -127,3 +129,15 @@
 	response := request_data_scouting_response.GetRootAsRequestDataScoutingResponse(responseBytes, 0)
 	return response.UnPack(), nil
 }
+
+// Sends a `RefreshMatchList` message to the server and returns the
+// deserialized response.
+func RefreshMatchList(server string, requestBytes []byte) (*RefreshMatchListResponseT, error) {
+	responseBytes, err := performPost(server+"/requests/refresh_match_list", requestBytes)
+	if err != nil {
+		return nil, err
+	}
+	log.Printf("Parsing RefreshMatchListResponse")
+	response := refresh_match_list_response.GetRootAsRefreshMatchListResponse(responseBytes, 0)
+	return response.UnPack(), nil
+}
diff --git a/scouting/webserver/requests/messages/BUILD b/scouting/webserver/requests/messages/BUILD
index 53ceab2..c27f730 100644
--- a/scouting/webserver/requests/messages/BUILD
+++ b/scouting/webserver/requests/messages/BUILD
@@ -10,6 +10,8 @@
     "request_matches_for_team_response",
     "request_data_scouting",
     "request_data_scouting_response",
+    "refresh_match_list",
+    "refresh_match_list_response",
 )
 
 filegroup(
diff --git a/scouting/webserver/requests/messages/refresh_match_list.fbs b/scouting/webserver/requests/messages/refresh_match_list.fbs
new file mode 100644
index 0000000..c4384c7
--- /dev/null
+++ b/scouting/webserver/requests/messages/refresh_match_list.fbs
@@ -0,0 +1,8 @@
+namespace scouting.webserver.requests;
+
+table RefreshMatchList {
+    year: int (id: 0);
+    event_code: string (id: 1);
+}
+
+root_type RefreshMatchList;
diff --git a/scouting/webserver/requests/messages/refresh_match_list_response.fbs b/scouting/webserver/requests/messages/refresh_match_list_response.fbs
new file mode 100644
index 0000000..ba80272
--- /dev/null
+++ b/scouting/webserver/requests/messages/refresh_match_list_response.fbs
@@ -0,0 +1,6 @@
+namespace scouting.webserver.requests;
+
+table RefreshMatchListResponse {
+}
+
+root_type RefreshMatchListResponse;
diff --git a/scouting/webserver/requests/requests.go b/scouting/webserver/requests/requests.go
index b3e03ff..93ee128 100644
--- a/scouting/webserver/requests/requests.go
+++ b/scouting/webserver/requests/requests.go
@@ -1,12 +1,18 @@
 package requests
 
 import (
+	"errors"
 	"fmt"
 	"io"
 	"net/http"
+	"strconv"
+	"strings"
 
 	"github.com/frc971/971-Robot-Code/scouting/db"
+	"github.com/frc971/971-Robot-Code/scouting/scraping"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/error_response"
+	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list"
+	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_matches"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_matches_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_data_scouting"
@@ -26,6 +32,8 @@
 type RequestMatchesForTeamResponseT = request_matches_for_team_response.RequestMatchesForTeamResponseT
 type RequestDataScouting = request_data_scouting.RequestDataScouting
 type RequestDataScoutingResponseT = request_data_scouting_response.RequestDataScoutingResponseT
+type RefreshMatchList = refresh_match_list.RefreshMatchList
+type RefreshMatchListResponseT = refresh_match_list_response.RefreshMatchListResponseT
 
 // The interface we expect the database abstraction to conform to.
 // We use an interface here because it makes unit testing easier.
@@ -38,6 +46,8 @@
 	QueryStats(int) ([]db.Stats, error)
 }
 
+type ScrapeMatchList func(int32, string) ([]scraping.Match, error)
+
 // Handles unknown requests. Just returns a 404.
 func unknown(w http.ResponseWriter, req *http.Request) {
 	w.WriteHeader(http.StatusNotFound)
@@ -262,10 +272,114 @@
 	w.Write(builder.FinishedBytes())
 }
 
-func HandleRequests(db Database, scoutingServer server.ScoutingServer) {
+// TODO(phil): Can we turn this into a generic?
+func parseRefreshMatchList(w http.ResponseWriter, buf []byte) (*RefreshMatchList, bool) {
+	success := true
+	defer func() {
+		if r := recover(); r != nil {
+			respondWithError(w, http.StatusBadRequest, fmt.Sprintf("Failed to parse RefreshMatchList: %v", r))
+			success = false
+		}
+	}()
+	result := refresh_match_list.GetRootAsRefreshMatchList(buf, 0)
+	return result, success
+}
+
+func parseTeamKey(teamKey string) (int, error) {
+	// TBA prefixes teams with "frc". Not sure why. Get rid of that.
+	teamKey = strings.TrimPrefix(teamKey, "frc")
+	return strconv.Atoi(teamKey)
+}
+
+// Parses the alliance data from the specified match and returns the three red
+// teams and the three blue teams.
+func parseTeamKeys(match *scraping.Match) ([3]int32, [3]int32, error) {
+	redKeys := match.Alliances.Red.TeamKeys
+	blueKeys := match.Alliances.Blue.TeamKeys
+
+	if len(redKeys) != 3 || len(blueKeys) != 3 {
+		return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
+			"Found %d red teams and %d blue teams.", len(redKeys), len(blueKeys)))
+	}
+
+	var red [3]int32
+	for i, key := range redKeys {
+		team, err := parseTeamKey(key)
+		if err != nil {
+			return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
+				"Failed to parse red %d team '%s' as integer: %v", i+1, key, err))
+		}
+		red[i] = int32(team)
+	}
+	var blue [3]int32
+	for i, key := range blueKeys {
+		team, err := parseTeamKey(key)
+		if err != nil {
+			return [3]int32{}, [3]int32{}, errors.New(fmt.Sprintf(
+				"Failed to parse blue %d team '%s' as integer: %v", i+1, key, err))
+		}
+		blue[i] = int32(team)
+	}
+	return red, blue, nil
+}
+
+type refreshMatchListHandler struct {
+	db     Database
+	scrape ScrapeMatchList
+}
+
+func (handler refreshMatchListHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
+	requestBytes, err := io.ReadAll(req.Body)
+	if err != nil {
+		respondWithError(w, http.StatusBadRequest, fmt.Sprint("Failed to read request bytes: ", err))
+		return
+	}
+
+	request, success := parseRefreshMatchList(w, requestBytes)
+	if !success {
+		return
+	}
+
+	matches, err := handler.scrape(request.Year(), string(request.EventCode()))
+	if err != nil {
+		respondWithError(w, http.StatusInternalServerError, fmt.Sprint("Failed to scrape match list: ", err))
+		return
+	}
+
+	for _, match := range matches {
+		// Make sure the data is valid.
+		red, blue, err := parseTeamKeys(&match)
+		if err != nil {
+			respondWithError(w, http.StatusInternalServerError, fmt.Sprintf(
+				"TheBlueAlliance data for match %d is malformed: %v", match.MatchNumber, err))
+			return
+		}
+		// Add the match to the database.
+		handler.db.AddToMatch(db.Match{
+			MatchNumber: int32(match.MatchNumber),
+			// TODO(phil): What does Round mean?
+			Round:     1,
+			CompLevel: match.CompLevel,
+			R1:        red[0],
+			R2:        red[1],
+			R3:        red[2],
+			B1:        blue[0],
+			B2:        blue[1],
+			B3:        blue[2],
+		})
+	}
+
+	var response RefreshMatchListResponseT
+	builder := flatbuffers.NewBuilder(1024)
+	builder.Finish((&response).Pack(builder))
+	w.Write(builder.FinishedBytes())
+}
+
+func HandleRequests(db Database, scrape ScrapeMatchList, scoutingServer server.ScoutingServer) {
 	scoutingServer.HandleFunc("/requests", unknown)
 	scoutingServer.Handle("/requests/submit/data_scouting", submitDataScoutingHandler{db})
 	scoutingServer.Handle("/requests/request/all_matches", requestAllMatchesHandler{db})
 	scoutingServer.Handle("/requests/request/matches_for_team", requestMatchesForTeamHandler{db})
 	scoutingServer.Handle("/requests/request/data_scouting", requestDataScoutingHandler{db})
+	scoutingServer.Handle("/requests/refresh_match_list", refreshMatchListHandler{db, scrape})
 }
diff --git a/scouting/webserver/requests/requests_test.go b/scouting/webserver/requests/requests_test.go
index e3650ff..999e955 100644
--- a/scouting/webserver/requests/requests_test.go
+++ b/scouting/webserver/requests/requests_test.go
@@ -8,8 +8,11 @@
 	"testing"
 
 	"github.com/frc971/971-Robot-Code/scouting/db"
+	"github.com/frc971/971-Robot-Code/scouting/scraping"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/debug"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/error_response"
+	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list"
+	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/refresh_match_list_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_matches"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_all_matches_response"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests/messages/request_data_scouting"
@@ -26,7 +29,7 @@
 func Test404(t *testing.T) {
 	db := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scoutingServer)
+	HandleRequests(&db, scrapeEmptyMatchList, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -43,7 +46,7 @@
 func TestSubmitDataScoutingError(t *testing.T) {
 	db := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scoutingServer)
+	HandleRequests(&db, scrapeEmptyMatchList, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -71,7 +74,7 @@
 func TestSubmitDataScouting(t *testing.T) {
 	db := MockDatabase{}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scoutingServer)
+	HandleRequests(&db, scrapeEmptyMatchList, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -119,7 +122,7 @@
 		},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scoutingServer)
+	HandleRequests(&db, scrapeEmptyMatchList, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -174,7 +177,7 @@
 		},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scoutingServer)
+	HandleRequests(&db, scrapeEmptyMatchList, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -227,7 +230,7 @@
 		},
 	}
 	scoutingServer := server.NewScoutingServer()
-	HandleRequests(&db, scoutingServer)
+	HandleRequests(&db, scrapeEmptyMatchList, scoutingServer)
 	scoutingServer.Start(8080)
 	defer scoutingServer.Stop()
 
@@ -269,6 +272,79 @@
 	}
 }
 
+// Validates that the refresh_match_list endpoint stores the scraped schedule in the database.
+func TestRefreshMatchList(t *testing.T) {
+	scrapeMockSchedule := func(int32, string) ([]scraping.Match, error) {
+		return []scraping.Match{
+			{
+				CompLevel:   "qual",
+				MatchNumber: 1,
+				Alliances: scraping.Alliances{
+					Red: scraping.Alliance{
+						TeamKeys: []string{
+							"100",
+							"200",
+							"300",
+						},
+					},
+					Blue: scraping.Alliance{
+						TeamKeys: []string{
+							"101",
+							"201",
+							"301",
+						},
+					},
+				},
+				WinningAlliance: "",
+				EventKey:        "",
+				Time:            0,
+				PredictedTime:   0,
+				ActualTime:      0,
+				PostResultTime:  0,
+				ScoreBreakdowns: scraping.ScoreBreakdowns{},
+			},
+		}, nil
+	}
+
+	database := MockDatabase{}
+	scoutingServer := server.NewScoutingServer()
+	HandleRequests(&database, scrapeMockSchedule, scoutingServer)
+	scoutingServer.Start(8080)
+	defer scoutingServer.Stop()
+
+	builder := flatbuffers.NewBuilder(1024)
+	builder.Finish((&refresh_match_list.RefreshMatchListT{}).Pack(builder))
+
+	response, err := debug.RefreshMatchList("http://localhost:8080", builder.FinishedBytes())
+	if err != nil {
+		t.Fatal("Failed to refresh the match list: ", err)
+	}
+
+	// Validate the response.
+	expected := refresh_match_list_response.RefreshMatchListResponseT{}
+	if !reflect.DeepEqual(expected, *response) {
+		t.Fatal("Expected ", expected, ", but got ", *response)
+	}
+
+	// Make sure that the data made it into the database.
+	expectedMatches := []db.Match{
+		{
+			MatchNumber: 1,
+			Round:       1,
+			CompLevel:   "qual",
+			R1:          100,
+			R2:          200,
+			R3:          300,
+			B1:          101,
+			B2:          201,
+			B3:          301,
+		},
+	}
+	if !reflect.DeepEqual(expectedMatches, database.matches) {
+		t.Fatal("Expected ", expectedMatches, ", but got ", database.matches)
+	}
+}
+
 // A mocked database we can use for testing. Add functionality to this as
 // needed for your tests.
 
@@ -277,7 +353,8 @@
 	stats   []db.Stats
 }
 
-func (database *MockDatabase) AddToMatch(db.Match) error {
+func (database *MockDatabase) AddToMatch(match db.Match) error {
+	database.matches = append(database.matches, match)
 	return nil
 }
 
@@ -309,3 +386,8 @@
 func (database *MockDatabase) QueryStats(int) ([]db.Stats, error) {
 	return []db.Stats{}, nil
 }
+
+// Returns an empty match list; stands in for The Blue Alliance scraper in tests.
+func scrapeEmptyMatchList(int32, string) ([]scraping.Match, error) {
+	return nil, nil
+}