scouting: Call ranking scraper

Move the TBA event year and event code into the scraping config (the
new "year" and "event_code" keys), export the ranking scraper type as
RankingScraper, and have the scouting webserver start the scraper on
startup and stop it on shutdown.

Signed-off-by: Yash Chainani <yashchainani28@gmail.com>
Change-Id: Ife105e949783587c4fb5f74eb77d4c14a1aabe22
diff --git a/scouting/scraping/scrape.go b/scouting/scraping/scrape.go
index 19426cf..625157a 100644
--- a/scouting/scraping/scrape.go
+++ b/scouting/scraping/scrape.go
@@ -13,8 +13,10 @@
 
 // Stores the TBA API key to access the API.
 type scrapingConfig struct {
-	ApiKey  string `json:"api_key"`
-	BaseUrl string `json:"base_url"`
+	ApiKey    string `json:"api_key"`
+	BaseUrl   string `json:"base_url"`
+	Year      int32  `json:"year"`
+	EventCode string `json:"event_code"`
 }
 
 // Takes in year and FIRST event code and returns requested information according to TBA.
@@ -47,9 +49,15 @@
 	if config.BaseUrl == "" {
 		config.BaseUrl = "https://www.thebluealliance.com"
 	}
+	if config.Year == 0 {
+		config.Year = year
+	}
+	if config.EventCode == "" {
+		config.EventCode = eventCode
+	}
 
 	// Create the TBA event key for the year and event code.
-	eventKey := strconv.Itoa(int(year)) + eventCode
+	eventKey := strconv.Itoa(int(config.Year)) + config.EventCode
 
 	// Create a get request for the match info.
 	req, err := http.NewRequest("GET", config.BaseUrl+"/api/v3/event/"+eventKey+"/"+category, nil)
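For reference, a standalone sketch of how the new fields behave: values set in the JSON config take precedence, and the caller-supplied year/event code are only used when the config leaves them zero/empty. The eventURL helper and the way the JSON is loaded here are illustrative only, not the scraper's actual code path.

// Illustrative sketch (not the production scraper) of the year/event_code
// fallback added above.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type scrapingConfig struct {
	ApiKey    string `json:"api_key"`
	BaseUrl   string `json:"base_url"`
	Year      int32  `json:"year"`
	EventCode string `json:"event_code"`
}

func eventURL(configJSON []byte, year int32, eventCode string, category string) (string, error) {
	var config scrapingConfig
	if err := json.Unmarshal(configJSON, &config); err != nil {
		return "", err
	}
	if config.BaseUrl == "" {
		config.BaseUrl = "https://www.thebluealliance.com"
	}
	// The new fallbacks: zero/empty config values defer to the arguments.
	if config.Year == 0 {
		config.Year = year
	}
	if config.EventCode == "" {
		config.EventCode = eventCode
	}
	eventKey := strconv.Itoa(int(config.Year)) + config.EventCode
	return config.BaseUrl + "/api/v3/event/" + eventKey + "/" + category, nil
}

func main() {
	// Config without year/event_code: the arguments fill them in.
	withoutEvent := []byte(`{"api_key": "k"}`)
	url, _ := eventURL(withoutEvent, 2016, "nytr", "rankings")
	fmt.Println(url) // https://www.thebluealliance.com/api/v3/event/2016nytr/rankings

	// Config with year/event_code: the config wins over the arguments.
	withEvent := []byte(`{"api_key": "k", "year": 2022, "event_code": "CMPTX"}`)
	url, _ = eventURL(withEvent, 2016, "nytr", "rankings")
	fmt.Println(url) // https://www.thebluealliance.com/api/v3/event/2022CMPTX/rankings
}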
diff --git a/scouting/webserver/BUILD b/scouting/webserver/BUILD
index 745852a..3df423e 100644
--- a/scouting/webserver/BUILD
+++ b/scouting/webserver/BUILD
@@ -9,6 +9,7 @@
     deps = [
         "//scouting/db",
         "//scouting/scraping",
+        "//scouting/webserver/rankings",
         "//scouting/webserver/requests",
         "//scouting/webserver/server",
         "//scouting/webserver/static",
diff --git a/scouting/webserver/main.go b/scouting/webserver/main.go
index 8f4298b..5d4ab01 100644
--- a/scouting/webserver/main.go
+++ b/scouting/webserver/main.go
@@ -15,6 +15,7 @@
 
 	"github.com/frc971/971-Robot-Code/scouting/db"
 	"github.com/frc971/971-Robot-Code/scouting/scraping"
+	"github.com/frc971/971-Robot-Code/scouting/webserver/rankings"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/requests"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
 	"github.com/frc971/971-Robot-Code/scouting/webserver/static"
@@ -131,6 +132,9 @@
 	scoutingServer.Start(*portPtr)
 	fmt.Println("Serving", *dirPtr, "on port", *portPtr)
 
+	scraper := rankings.RankingScraper{}
+	scraper.Start(database, 0, "", *blueAllianceConfigPtr)
+
 	// Block until the user hits Ctrl-C.
 	sigint := make(chan os.Signal, 1)
 	signal.Notify(sigint, syscall.SIGINT)
@@ -140,5 +144,6 @@
 
 	fmt.Println("Shutting down.")
 	scoutingServer.Stop()
+	scraper.Stop()
 	fmt.Println("Successfully shut down.")
 }
diff --git a/scouting/webserver/rankings/rankings.go b/scouting/webserver/rankings/rankings.go
index 064aa13..6e63c0a 100644
--- a/scouting/webserver/rankings/rankings.go
+++ b/scouting/webserver/rankings/rankings.go
@@ -9,7 +9,7 @@
 	"time"
 )
 
-type rankingScraper struct {
+type RankingScraper struct {
 	doneChan     chan<- bool
 	checkStopped chan<- bool
 }
@@ -52,7 +52,7 @@
 	}
 }
 
-func (scraper *rankingScraper) Start(database Database, year int32, eventCode string, blueAllianceConfig string) {
+func (scraper *RankingScraper) Start(database Database, year int32, eventCode string, blueAllianceConfig string) {
 	scraper.doneChan = make(chan bool, 1)
 	scraper.checkStopped = make(chan bool, 1)
 
@@ -79,7 +79,7 @@
 	}(database, year, eventCode)
 }
 
-func (scraper *rankingScraper) Stop() {
+func (scraper *RankingScraper) Stop() {
 	scraper.doneChan <- true
 
 	for {
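The scrape loop itself is outside this diff; only the type rename is shown above. As a rough, self-contained sketch of the kind of channel handshake the doneChan/checkStopped fields and the Stop() method suggest (the backgroundScraper type, its scrape callback, and the short ticker interval are invented for this illustration, not taken from rankings.go):

// Minimal illustration of a start/stop handshake using a done channel and a
// stopped-confirmation channel, similar in shape to RankingScraper's fields.
package main

import (
	"fmt"
	"time"
)

type backgroundScraper struct {
	doneChan     chan bool
	checkStopped chan bool
}

func (s *backgroundScraper) Start(scrape func()) {
	s.doneChan = make(chan bool, 1)
	s.checkStopped = make(chan bool, 1)

	go func() {
		// Short interval so the demo finishes quickly; a real scraper would
		// poll far less often.
		ticker := time.NewTicker(100 * time.Millisecond)
		defer ticker.Stop()
		for {
			scrape()
			select {
			case <-s.doneChan:
				// Tell Stop() that the loop has exited.
				s.checkStopped <- true
				return
			case <-ticker.C:
			}
		}
	}()
}

func (s *backgroundScraper) Stop() {
	s.doneChan <- true
	<-s.checkStopped
}

func main() {
	s := &backgroundScraper{}
	s.Start(func() { fmt.Println("scrape rankings") })
	time.Sleep(250 * time.Millisecond)
	s.Stop()
	fmt.Println("scraper stopped")
}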
diff --git a/scouting/webserver/rankings/rankings_test.go b/scouting/webserver/rankings/rankings_test.go
index f47c0a2..aa23c76 100644
--- a/scouting/webserver/rankings/rankings_test.go
+++ b/scouting/webserver/rankings/rankings_test.go
@@ -30,13 +30,13 @@
 
 func TestGetRankings(t *testing.T) {
 	database := MockDatabase{}
-	scraper := rankingScraper{}
+	scraper := RankingScraper{}
 	tbaServer := server.NewScoutingServer()
 	tbaServer.Handle("/", ServeRankings(http.FileServer(http.Dir("../../"))))
 	tbaServer.Start(8000)
 	defer tbaServer.Stop()
 
-	scraper.Start(&database, 2016, "nytr", "scouting_test_config.json")
+	scraper.Start(&database, 0, "", "scouting_test_config.json")
 	defer scraper.Stop()
 
 	for {
diff --git a/scouting/webserver/rankings/scouting_test_config.json b/scouting/webserver/rankings/scouting_test_config.json
index 19a1b4f..40a7747 100644
--- a/scouting/webserver/rankings/scouting_test_config.json
+++ b/scouting/webserver/rankings/scouting_test_config.json
@@ -1,4 +1,6 @@
 {
      "api_key": "dummy_key_that_is_not_actually_used_in_this_test",
-     "base_url": "http://localhost:8000"
+     "base_url": "http://localhost:8000",
+     "year": 2022,
+     "event_code": "CMPTX"
 }
\ No newline at end of file
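Since the test now passes a zero year and an empty event code, these two new keys decide which event the test scraper asks the local mock server for. Assuming the scraper requests the "rankings" category, the resulting URL works out as follows (a throwaway check, not test code):

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Values from scouting_test_config.json above.
	year, eventCode := int32(2022), "CMPTX"
	baseUrl := "http://localhost:8000" // the test's mock TBA server on port 8000
	eventKey := strconv.Itoa(int(year)) + eventCode
	fmt.Println(baseUrl + "/api/v3/event/" + eventKey + "/rankings")
	// Prints: http://localhost:8000/api/v3/event/2022CMPTX/rankings
}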