Refactor some code for scraping thebluealliance.com in the background

I want to make it so that the match list is imported automatically.
Right now we have to manually refresh the match list as elimination
matches happen.

This patch refactors the ranking scraping code so that we can add a
match list scraper easily in a future patch.

I made the scraping helper and the demo's dump logic generic over the
scraped data type, so the same code can be reused for additional
thebluealliance.com endpoints later.

Signed-off-by: Philipp Schrader <philipp.schrader@gmail.com>
Change-Id: Ia8164b62deceddeda683ac3080e99e0fe1b5904a
diff --git a/scouting/scraping/background/BUILD b/scouting/scraping/background/BUILD
new file mode 100644
index 0000000..9aa92c9
--- /dev/null
+++ b/scouting/scraping/background/BUILD
@@ -0,0 +1,9 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "background",
+    srcs = ["background.go"],
+    importpath = "github.com/frc971/971-Robot-Code/scouting/scraping/background",
+    target_compatible_with = ["@platforms//cpu:x86_64"],
+    visibility = ["//visibility:public"],
+)
diff --git a/scouting/scraping/background/background.go b/scouting/scraping/background/background.go
new file mode 100644
index 0000000..5af8c3e
--- /dev/null
+++ b/scouting/scraping/background/background.go
@@ -0,0 +1,49 @@
+package background
+
+import (
+	"time"
+)
+
+// BackgroundScraper runs a user-supplied function in the background
+// roughly every 10 minutes. Technically it can be used for a lot of
+// different things, but it is primarily geared towards scraping
+// thebluealliance.com.
+type BackgroundScraper struct {
+	doneChan     chan bool
+	checkStopped chan bool
+}
+
+// Start spawns a goroutine that calls scrape() once right away and
+// then again every 10 minutes until Stop() is called.
+func (scraper *BackgroundScraper) Start(scrape func()) {
+	scraper.doneChan = make(chan bool, 1)
+	scraper.checkStopped = make(chan bool, 1)
+
+	go func() {
+		// Scrape immediately so users don't have to wait for the
+		// first 10-minute tick to elapse.
+		scrape()
+
+		ticker := time.NewTicker(10 * time.Minute)
+		defer ticker.Stop()
+
+		for {
+			select {
+			case <-ticker.C:
+				scrape()
+			case <-scraper.doneChan:
+				scraper.checkStopped <- true
+				return
+			}
+		}
+	}()
+}
+
+// Stop asks the background goroutine to exit and blocks until it has
+// acknowledged that it is done.
+func (scraper *BackgroundScraper) Stop() {
+	scraper.doneChan <- true
+	<-scraper.checkStopped
+	close(scraper.doneChan)
+	close(scraper.checkStopped)
+}
diff --git a/scouting/scraping/scrape.go b/scouting/scraping/scrape.go
index 625157a..b905465 100644
--- a/scouting/scraping/scrape.go
+++ b/scouting/scraping/scrape.go
@@ -89,36 +89,20 @@
 	return bodyBytes, nil
 }
 
-// Return all matches in event according to TBA
-func AllMatches(year int32, eventCode, configPath string) ([]Match, error) {
-	bodyBytes, err := getJson(year, eventCode, configPath, "matches")
-
+// GetAllData fetches the given category of data (e.g. "matches" or
+// "rankings") for the specified event from TBA and parses the JSON
+// response into an instance of type T.
+func GetAllData[T any](year int32, eventCode, configPath string, category string) (T, error) {
+	var result T
+	bodyBytes, err := getJson(year, eventCode, configPath, category)
 	if err != nil {
-		return nil, err
+		return result, err
 	}
 
-	var matches []Match
-	// Unmarshal json into go usable format.
-	if err := json.Unmarshal([]byte(bodyBytes), &matches); err != nil {
-		return nil, errors.New(fmt.Sprint("Failed to parse JSON received from TBA: ", err))
+	// Unmarshal the JSON data into the in-memory format.
+	if err = json.Unmarshal([]byte(bodyBytes), &result); err != nil {
+		return result, errors.New(fmt.Sprint("Failed to parse ", category, " JSON received from TBA: ", err))
 	}
 
-	return matches, nil
-}
-
-// Return event rankings according to TBA
-func AllRankings(year int32, eventCode, configPath string) (EventRanking, error) {
-	bodyBytes, err := getJson(year, eventCode, configPath, "rankings")
-
-	if err != nil {
-		return EventRanking{}, err
-	}
-
-	var rankings EventRanking
-	// Unmarshal json into go usable format.
-	if err := json.Unmarshal([]byte(bodyBytes), &rankings); err != nil {
-		return EventRanking{}, errors.New(fmt.Sprint("Failed to parse JSON received from TBA: ", err))
-	}
-
-	return rankings, nil
+	return result, nil
 }
diff --git a/scouting/scraping/scraping_demo.go b/scouting/scraping/scraping_demo.go
index 69cdbff..0c58612 100644
--- a/scouting/scraping/scraping_demo.go
+++ b/scouting/scraping/scraping_demo.go
@@ -11,6 +11,27 @@
 	"github.com/frc971/971-Robot-Code/scouting/scraping"
 )
 
+// dumpData scrapes the given category of data from TBA and prints it
+// either as JSON or as Go debug output, depending on *jsonPtr.
+func dumpData[T any](jsonPtr *bool, category string) {
+	// Get all the data.
+	data, err := scraping.GetAllData[T](2016, "nytr", "", category)
+	if err != nil {
+		log.Fatal("Failed to scrape ", category, " data: ", err)
+	}
+
+	// Dump the data.
+	if *jsonPtr {
+		jsonData, err := json.MarshalIndent(data, "", "  ")
+		if err != nil {
+			log.Fatal("Failed to turn ", category, " data into JSON: ", err)
+		}
+		fmt.Println(string(jsonData))
+	} else {
+		spew.Dump(data)
+	}
+}
+
 func main() {
 	jsonPtr := flag.Bool("json", false, "If set, dump as JSON, rather than Go debug output.")
 	demoCategory := flag.String("category", "matches", "Decide whether to demo matches or rankings.")
@@ -18,38 +37,8 @@
 	flag.Parse()
 
 	if *demoCategory == "rankings" {
-		// Get all the rankings.
-		rankings, err := scraping.AllRankings(2016, "nytr", "")
-		if err != nil {
-			log.Fatal("Failed to scrape ranking list: ", err)
-		}
-
-		// Dump the rankings.
-		if *jsonPtr {
-			jsonData, err := json.MarshalIndent(rankings, "", "  ")
-			if err != nil {
-				log.Fatal("Failed to turn ranking list into JSON: ", err)
-			}
-			fmt.Println(string(jsonData))
-		} else {
-			spew.Dump(rankings)
-		}
+		dumpData[scraping.EventRanking](jsonPtr, "rankings")
 	} else if *demoCategory == "matches" {
-		// Get all the matches.
-		matches, err := scraping.AllMatches(2016, "nytr", "")
-		if err != nil {
-			log.Fatal("Failed to scrape match list: ", err)
-		}
-
-		// Dump the matches.
-		if *jsonPtr {
-			jsonData, err := json.MarshalIndent(matches, "", "  ")
-			if err != nil {
-				log.Fatal("Failed to turn match list into JSON: ", err)
-			}
-			fmt.Println(string(jsonData))
-		} else {
-			spew.Dump(matches)
-		}
+		dumpData[[]scraping.Match](jsonPtr, "matches")
 	}
 }