Refactor some code for scraping thebluealliance.com in the background
Right now we have to manually refresh the match list as elimination
matches happen. I want the match list to be imported automatically
instead.
This patch refactors the ranking scraping code so that we can add a
match list scraper easily in a future patch.
I made a few functions generic so the code is easier to reuse for
other kinds of data later.
Signed-off-by: Philipp Schrader <philipp.schrader@gmail.com>
Change-Id: Ia8164b62deceddeda683ac3080e99e0fe1b5904a
diff --git a/scouting/scraping/scraping_demo.go b/scouting/scraping/scraping_demo.go
index 69cdbff..0c58612 100644
--- a/scouting/scraping/scraping_demo.go
+++ b/scouting/scraping/scraping_demo.go
@@ -11,6 +11,25 @@
"github.com/frc971/971-Robot-Code/scouting/scraping"
)
+func dumpData[T interface{}](jsonPtr *bool, category string) {
+ // Get all the data.
+ data, err := scraping.GetAllData[T](2016, "nytr", "", category)
+ if err != nil {
+ log.Fatal("Failed to scrape ", category, " data: ", err)
+ }
+
+ // Dump the data.
+ if *jsonPtr {
+ jsonData, err := json.MarshalIndent(data, "", " ")
+ if err != nil {
+ log.Fatal("Failed to turn ranking list into JSON: ", err)
+ }
+ fmt.Println(string(jsonData))
+ } else {
+ spew.Dump(data)
+ }
+}
+
func main() {
jsonPtr := flag.Bool("json", false, "If set, dump as JSON, rather than Go debug output.")
demoCategory := flag.String("category", "matches", "Decide whether to demo matches or rankings.")
@@ -18,38 +37,8 @@
flag.Parse()
if *demoCategory == "rankings" {
- // Get all the rankings.
- rankings, err := scraping.AllRankings(2016, "nytr", "")
- if err != nil {
- log.Fatal("Failed to scrape ranking list: ", err)
- }
-
- // Dump the rankings.
- if *jsonPtr {
- jsonData, err := json.MarshalIndent(rankings, "", " ")
- if err != nil {
- log.Fatal("Failed to turn ranking list into JSON: ", err)
- }
- fmt.Println(string(jsonData))
- } else {
- spew.Dump(rankings)
- }
+ dumpData[scraping.EventRanking](jsonPtr, "rankings")
} else if *demoCategory == "matches" {
- // Get all the matches.
- matches, err := scraping.AllMatches(2016, "nytr", "")
- if err != nil {
- log.Fatal("Failed to scrape match list: ", err)
- }
-
- // Dump the matches.
- if *jsonPtr {
- jsonData, err := json.MarshalIndent(matches, "", " ")
- if err != nil {
- log.Fatal("Failed to turn match list into JSON: ", err)
- }
- fmt.Println(string(jsonData))
- } else {
- spew.Dump(matches)
- }
+ dumpData[[]scraping.Match](jsonPtr, "matches")
}
}
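
For reference, a future match list scraper built on the new generic
GetAllData helper might look roughly like the sketch below. The polling
loop, the 10-minute interval, and the handleMatches callback are
illustrative assumptions and are not part of this patch; only
scraping.GetAllData, scraping.Match, and the demo's hard-coded event
parameters come from the code above.

package main

import (
	"log"
	"time"

	"github.com/frc971/971-Robot-Code/scouting/scraping"
)

// handleMatches stands in for whatever will import the scraped match
// list into the scouting database.
func handleMatches(matches []scraping.Match) {
	log.Printf("Got %d matches", len(matches))
}

func main() {
	// Poll The Blue Alliance periodically so new elimination matches are
	// picked up without a manual refresh. The interval is a guess.
	for {
		matches, err := scraping.GetAllData[[]scraping.Match](2016, "nytr", "", "matches")
		if err != nil {
			log.Println("Failed to scrape match list:", err)
		} else {
			handleMatches(matches)
		}
		time.Sleep(10 * time.Minute)
	}
}

Because GetAllData is generic over the decoded type, the same loop could
scrape rankings instead by swapping the type parameter and the category
string.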