Refactor the background scraping code for thebluealliance.com

I want the match list to be imported automatically. Right now we have
to refresh the match list manually as elimination matches are played.

This patch refactors the ranking scraping code so that we can add a
match list scraper easily in a future patch.

I made a few pieces generic so that it'll be easier to reuse the code
for more scrapers later. As a rough, hypothetical sketch (year,
eventCode, configPath, and the match storage are placeholders), the
future match scraper could then be wired up like this:
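
    matchScraper := background.BackgroundScraper{}
    matchScraper.Start(func() {
        matches, err := scraping.GetAllData[[]scraping.Match](
            year, eventCode, configPath, "matches")
        if err != nil {
            log.Println("Failed to scrape match list: ", err)
            return
        }
        // Hypothetical: store the scraped matches in the database.
        _ = matches
    })
    defer matchScraper.Stop()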

Signed-off-by: Philipp Schrader <philipp.schrader@gmail.com>
Change-Id: Ia8164b62deceddeda683ac3080e99e0fe1b5904a
diff --git a/scouting/scraping/background/BUILD b/scouting/scraping/background/BUILD
new file mode 100644
index 0000000..9aa92c9
--- /dev/null
+++ b/scouting/scraping/background/BUILD
@@ -0,0 +1,9 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "background",
+    srcs = ["background.go"],
+    importpath = "github.com/frc971/971-Robot-Code/scouting/scraping/background",
+    target_compatible_with = ["@platforms//cpu:x86_64"],
+    visibility = ["//visibility:public"],
+)
diff --git a/scouting/scraping/background/background.go b/scouting/scraping/background/background.go
new file mode 100644
index 0000000..5af8c3e
--- /dev/null
+++ b/scouting/scraping/background/background.go
@@ -0,0 +1,56 @@
+package background
+
+import (
+	"time"
+)
+
+// BackgroundScraper runs a function in the background roughly every 10
+// minutes. It can run any periodic task, but is primarily geared towards
+// scraping thebluealliance.com.
+type BackgroundScraper struct {
+	doneChan     chan<- bool
+	checkStopped chan<- bool
+}
+
+func (scraper *BackgroundScraper) Start(scrape func()) {
+	scraper.doneChan = make(chan bool, 1)
+	scraper.checkStopped = make(chan bool, 1)
+
+	go func() {
+		// Start the clock 11 minutes in the past so that scrape() runs immediately when Start() is called.
+		startTime := time.Now().Add(-11 * time.Minute)
+		for {
+			curTime := time.Now()
+			diff := curTime.Sub(startTime)
+
+			if diff.Minutes() > 10 {
+				scrape()
+				startTime = curTime
+			}
+
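+			// Stop() signals shutdown by sending a value on doneChan;
+			// check the buffer length so this loop never blocks.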
+			if len(scraper.doneChan) != 0 {
+				break
+			}
+
+			time.Sleep(time.Second)
+		}
+
+		scraper.checkStopped <- true
+	}()
+}
+
+func (scraper *BackgroundScraper) Stop() {
+	scraper.doneChan <- true
+
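+	// Busy-wait until the background goroutine confirms that it has
+	// exited, then clean up both channels.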
+	for {
+		if len(scraper.checkStopped) != 0 {
+			close(scraper.doneChan)
+			close(scraper.checkStopped)
+			break
+		}
+	}
+}
diff --git a/scouting/scraping/scrape.go b/scouting/scraping/scrape.go
index 625157a..b905465 100644
--- a/scouting/scraping/scrape.go
+++ b/scouting/scraping/scrape.go
@@ -89,36 +89,19 @@
 	return bodyBytes, nil
 }
 
-// Return all matches in event according to TBA
-func AllMatches(year int32, eventCode, configPath string) ([]Match, error) {
-	bodyBytes, err := getJson(year, eventCode, configPath, "matches")
-
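+// GetAllData fetches the given category of event data (e.g. "matches" or
+// "rankings") from TBA and unmarshals the JSON into a value of type T.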
+func GetAllData[T any](year int32, eventCode, configPath, category string) (T, error) {
+	var result T
+	bodyBytes, err := getJson(year, eventCode, configPath, category)
 	if err != nil {
-		return nil, err
+		return result, err
 	}
 
-	var matches []Match
-	// Unmarshal json into go usable format.
-	if err := json.Unmarshal([]byte(bodyBytes), &matches); err != nil {
-		return nil, errors.New(fmt.Sprint("Failed to parse JSON received from TBA: ", err))
+	// Unmarshal the JSON data into the in-memory format.
+	if err = json.Unmarshal([]byte(bodyBytes), &result); err != nil {
+		return result, errors.New(fmt.Sprint("Failed to parse ", category, " JSON received from TBA: ", err))
 	}
 
-	return matches, nil
-}
-
-// Return event rankings according to TBA
-func AllRankings(year int32, eventCode, configPath string) (EventRanking, error) {
-	bodyBytes, err := getJson(year, eventCode, configPath, "rankings")
-
-	if err != nil {
-		return EventRanking{}, err
-	}
-
-	var rankings EventRanking
-	// Unmarshal json into go usable format.
-	if err := json.Unmarshal([]byte(bodyBytes), &rankings); err != nil {
-		return EventRanking{}, errors.New(fmt.Sprint("Failed to parse JSON received from TBA: ", err))
-	}
-
-	return rankings, nil
+	return result, nil
 }
diff --git a/scouting/scraping/scraping_demo.go b/scouting/scraping/scraping_demo.go
index 69cdbff..0c58612 100644
--- a/scouting/scraping/scraping_demo.go
+++ b/scouting/scraping/scraping_demo.go
@@ -11,6 +11,27 @@
 	"github.com/frc971/971-Robot-Code/scouting/scraping"
 )
 
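+// dumpData scrapes the given category of data from TBA and dumps it as JSON
+// or as Go debug output, depending on the -json flag.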
+func dumpData[T any](jsonPtr *bool, category string) {
+	// Get all the data.
+	data, err := scraping.GetAllData[T](2016, "nytr", "", category)
+	if err != nil {
+		log.Fatal("Failed to scrape ", category, " data: ", err)
+	}
+
+	// Dump the data.
+	if *jsonPtr {
+		jsonData, err := json.MarshalIndent(data, "", "  ")
+		if err != nil {
+			log.Fatal("Failed to turn ranking list into JSON: ", err)
+		}
+		fmt.Println(string(jsonData))
+	} else {
+		spew.Dump(data)
+	}
+}
+
 func main() {
 	jsonPtr := flag.Bool("json", false, "If set, dump as JSON, rather than Go debug output.")
 	demoCategory := flag.String("category", "matches", "Decide whether to demo matches or rankings.")
@@ -18,38 +37,8 @@
 	flag.Parse()
 
 	if *demoCategory == "rankings" {
-		// Get all the rankings.
-		rankings, err := scraping.AllRankings(2016, "nytr", "")
-		if err != nil {
-			log.Fatal("Failed to scrape ranking list: ", err)
-		}
-
-		// Dump the rankings.
-		if *jsonPtr {
-			jsonData, err := json.MarshalIndent(rankings, "", "  ")
-			if err != nil {
-				log.Fatal("Failed to turn ranking list into JSON: ", err)
-			}
-			fmt.Println(string(jsonData))
-		} else {
-			spew.Dump(rankings)
-		}
+		dumpData[scraping.EventRanking](jsonPtr, "rankings")
 	} else if *demoCategory == "matches" {
-		// Get all the matches.
-		matches, err := scraping.AllMatches(2016, "nytr", "")
-		if err != nil {
-			log.Fatal("Failed to scrape match list: ", err)
-		}
-
-		// Dump the matches.
-		if *jsonPtr {
-			jsonData, err := json.MarshalIndent(matches, "", "  ")
-			if err != nil {
-				log.Fatal("Failed to turn match list into JSON: ", err)
-			}
-			fmt.Println(string(jsonData))
-		} else {
-			spew.Dump(matches)
-		}
+		dumpData[[]scraping.Match](jsonPtr, "matches")
 	}
 }
diff --git a/scouting/webserver/BUILD b/scouting/webserver/BUILD
index 3df423e..e1ab726 100644
--- a/scouting/webserver/BUILD
+++ b/scouting/webserver/BUILD
@@ -9,6 +9,7 @@
     deps = [
         "//scouting/db",
         "//scouting/scraping",
+        "//scouting/scraping/background",
         "//scouting/webserver/rankings",
         "//scouting/webserver/requests",
         "//scouting/webserver/server",
diff --git a/scouting/webserver/main.go b/scouting/webserver/main.go
index d2fbdfe..77cc3c4 100644
--- a/scouting/webserver/main.go
+++ b/scouting/webserver/main.go
@@ -15,6 +15,7 @@
 
 	"github.com/frc971/971-Robot-Code/scouting/db"
 	"github.com/frc971/971-Robot-Code/scouting/scraping"
+	"github.com/frc971/971-Robot-Code/scouting/scraping/background"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/rankings"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
@@ -123,7 +124,7 @@
 		if *blueAllianceConfigPtr == "" {
 			return nil, errors.New("Cannot scrape TBA's match list without a config file.")
 		}
-		return scraping.AllMatches(year, eventCode, *blueAllianceConfigPtr)
+		return scraping.GetAllData[[]scraping.Match](year, eventCode, *blueAllianceConfigPtr, "matches")
 	}
 
 	scoutingServer := server.NewScoutingServer()
@@ -132,8 +133,10 @@
 	scoutingServer.Start(*portPtr)
 	fmt.Println("Serving", *dirPtr, "on port", *portPtr)
 
-	scraper := rankings.RankingScraper{}
-	scraper.Start(database, 0, "", *blueAllianceConfigPtr)
+	rankingsScraper := background.BackgroundScraper{}
+	rankingsScraper.Start(func() {
+		rankings.GetRankings(database, 0, "", *blueAllianceConfigPtr)
+	})
 
 	// Block until the user hits Ctrl-C.
 	sigint := make(chan os.Signal, 1)
@@ -144,6 +147,6 @@
 
 	fmt.Println("Shutting down.")
 	scoutingServer.Stop()
-	scraper.Stop()
+	rankingsScraper.Stop()
 	fmt.Println("Successfully shut down.")
 }
diff --git a/scouting/webserver/rankings/BUILD b/scouting/webserver/rankings/BUILD
index c74f88f..4696d26 100644
--- a/scouting/webserver/rankings/BUILD
+++ b/scouting/webserver/rankings/BUILD
@@ -21,6 +21,7 @@
     embed = [":rankings"],
     deps = [
         "//scouting/db",
+        "//scouting/scraping/background",
         "//scouting/webserver/server",
     ],
 )
diff --git a/scouting/webserver/rankings/rankings.go b/scouting/webserver/rankings/rankings.go
index 6e63c0a..0d20b54 100644
--- a/scouting/webserver/rankings/rankings.go
+++ b/scouting/webserver/rankings/rankings.go
@@ -6,14 +6,8 @@
 	"log"
 	"strconv"
 	"strings"
-	"time"
 )
 
-type RankingScraper struct {
-	doneChan     chan<- bool
-	checkStopped chan<- bool
-}
-
 type Database interface {
 	AddOrUpdateRankings(db.Ranking) error
 }
@@ -24,8 +18,10 @@
 	return strconv.Atoi(teamKey)
 }
 
-func getRankings(database Database, year int32, eventCode string, blueAllianceConfig string) {
-	rankings, err := scraping.AllRankings(year, eventCode, blueAllianceConfig)
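+// GetRankings scrapes the event rankings from TBA and saves them in the
+// database.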
+func GetRankings(database Database, year int32, eventCode string, blueAllianceConfig string) {
+	rankings, err := scraping.GetAllData[scraping.EventRanking](year, eventCode, blueAllianceConfig, "rankings")
 	if err != nil {
 		log.Println("Failed to scrape ranking list: ", err)
 		return
@@ -51,42 +45,3 @@
 		}
 	}
 }
-
-func (scraper *RankingScraper) Start(database Database, year int32, eventCode string, blueAllianceConfig string) {
-	scraper.doneChan = make(chan bool, 1)
-	scraper.checkStopped = make(chan bool, 1)
-
-	go func(database Database, year int32, eventCode string) {
-		// Setting start time to 11 minutes prior so getRankings called instantly when Start() called
-		startTime := time.Now().Add(-11 * time.Minute)
-		for {
-			curTime := time.Now()
-			diff := curTime.Sub(startTime)
-
-			if diff.Minutes() > 10 {
-				getRankings(database, year, eventCode, blueAllianceConfig)
-				startTime = curTime
-			}
-
-			if len(scraper.doneChan) != 0 {
-				break
-			}
-
-			time.Sleep(time.Second)
-		}
-
-		scraper.checkStopped <- true
-	}(database, year, eventCode)
-}
-
-func (scraper *RankingScraper) Stop() {
-	scraper.doneChan <- true
-
-	for {
-		if len(scraper.checkStopped) != 0 {
-			close(scraper.doneChan)
-			close(scraper.checkStopped)
-			break
-		}
-	}
-}
diff --git a/scouting/webserver/rankings/rankings_test.go b/scouting/webserver/rankings/rankings_test.go
index aa23c76..6f8af3b 100644
--- a/scouting/webserver/rankings/rankings_test.go
+++ b/scouting/webserver/rankings/rankings_test.go
@@ -2,9 +2,11 @@
 
 import (
 	"github.com/frc971/971-Robot-Code/scouting/db"
+	"github.com/frc971/971-Robot-Code/scouting/scraping/background"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
 	"net/http"
 	"reflect"
+	"strings"
 	"testing"
 	"time"
 )
@@ -18,8 +20,13 @@
 	return nil
 }
 
-func ServeRankings(h http.Handler) http.Handler {
+func ServeRankings(t *testing.T, h http.Handler) http.Handler {
 	fn := func(w http.ResponseWriter, r *http.Request) {
+		// Make sure that the rankings are requested properly.
+		if !strings.HasSuffix(r.URL.Path, "/2016nytr/rankings") {
+			t.Error("Got unexpected URL: ", r.URL.Path)
+		}
+
 		r.URL.Path = "scraping/test_data/2016_nytr_rankings.json"
 
 		h.ServeHTTP(w, r)
@@ -30,13 +37,15 @@
 
 func TestGetRankings(t *testing.T) {
 	database := MockDatabase{}
-	scraper := RankingScraper{}
+	scraper := background.BackgroundScraper{}
 	tbaServer := server.NewScoutingServer()
-	tbaServer.Handle("/", ServeRankings(http.FileServer(http.Dir("../../"))))
+	tbaServer.Handle("/", ServeRankings(t, http.FileServer(http.Dir("../../"))))
 	tbaServer.Start(8000)
 	defer tbaServer.Stop()
 
-	scraper.Start(&database, 0, "", "scouting_test_config.json")
+	scraper.Start(func() {
+		GetRankings(&database, 0, "", "scouting_test_config.json")
+	})
 	defer scraper.Stop()
 
 	for {
diff --git a/scouting/webserver/rankings/scouting_test_config.json b/scouting/webserver/rankings/scouting_test_config.json
index 40a7747..6bc4fec 100644
--- a/scouting/webserver/rankings/scouting_test_config.json
+++ b/scouting/webserver/rankings/scouting_test_config.json
@@ -1,6 +1,6 @@
 {
      "api_key": "dummy_key_that_is_not_actually_used_in_this_test",
      "base_url": "http://localhost:8000",
-     "year": 2022,
-     "event_code": "CMPTX"
-}
\ No newline at end of file
+     "year": 2016,
+     "event_code": "nytr"
+}