scouting: Scrape rankings and add to db
Signed-off-by: Yash Chainani <yashchainani28@gmail.com>
Change-Id: I9e84feeaaee640e98ba72919b022379de39f9d29
diff --git a/scouting/webserver/rankings/BUILD b/scouting/webserver/rankings/BUILD
new file mode 100644
index 0000000..c74f88f
--- /dev/null
+++ b/scouting/webserver/rankings/BUILD
@@ -0,0 +1,26 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
+
+go_library(
+ name = "rankings",
+ srcs = ["rankings.go"],
+ importpath = "github.com/frc971/971-Robot-Code/scouting/webserver/rankings",
+ visibility = ["//visibility:public"],
+ deps = [
+ "//scouting/db",
+ "//scouting/scraping",
+ ],
+)
+
+go_test(
+ name = "rankings_test",
+ srcs = ["rankings_test.go"],
+ data = [
+ "scouting_test_config.json",
+ "//scouting/scraping:test_data",
+ ],
+ embed = [":rankings"],
+ deps = [
+ "//scouting/db",
+ "//scouting/webserver/server",
+ ],
+)
diff --git a/scouting/webserver/rankings/rankings.go b/scouting/webserver/rankings/rankings.go
new file mode 100644
index 0000000..064aa13
--- /dev/null
+++ b/scouting/webserver/rankings/rankings.go
@@ -0,0 +1,92 @@
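+// Package rankings periodically scrapes event rankings from The Blue Alliance
+// and stores them in the scouting database.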
+package rankings
+
+import (
+ "github.com/frc971/971-Robot-Code/scouting/db"
+ "github.com/frc971/971-Robot-Code/scouting/scraping"
+ "log"
+ "strconv"
+ "strings"
+ "time"
+)
+
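+// A rankingScraper manages a background goroutine that periodically scrapes
+// rankings from The Blue Alliance. doneChan tells the goroutine to exit and
+// checkStopped reports back once it has.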
+type rankingScraper struct {
+ doneChan chan bool
+ checkStopped chan bool
+}
+
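+// Database is the subset of the scouting database that the scraper needs.
+// It is an interface so that tests can supply a mock implementation.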
+type Database interface {
+ AddOrUpdateRankings(db.Ranking) error
+}
+
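+// parseTeamKey converts a TBA team key such as "frc971" into the plain team number.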
+func parseTeamKey(teamKey string) (int, error) {
+ // TBA prefixes team keys with "frc". Strip that prefix so the rest parses as a number.
+ teamKey = strings.TrimPrefix(teamKey, "frc")
+ return strconv.Atoi(teamKey)
+}
+
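+// getRankings scrapes the rankings for the given event from The Blue Alliance
+// and adds or updates each entry in the database.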
+func getRankings(database Database, year int32, eventCode string, blueAllianceConfig string) {
+ rankings, err := scraping.AllRankings(year, eventCode, blueAllianceConfig)
+ if err != nil {
+ log.Println("Failed to scrape ranking list: ", err)
+ return
+ }
+
+ for _, rank := range rankings.Rankings {
+ teamKey, err := parseTeamKey(rank.TeamKey)
+
+ if err != nil {
+ log.Println("Failed to parse team key: ", err)
+ continue
+ }
+
+ rankingInfo := db.Ranking{
+ TeamNumber: teamKey,
+ Losses: rank.Records.Losses, Wins: rank.Records.Wins, Ties: rank.Records.Ties,
+ Rank: rank.Rank, Dq: rank.Dq,
+ }
+ err = database.AddOrUpdateRankings(rankingInfo)
+
+ if err != nil {
+ log.Println("Failed to add or update database: ", err)
+ }
+ }
+}
+
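+// Start kicks off a background goroutine that re-scrapes the event rankings
+// roughly every 10 minutes until Stop() is called.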
+func (scraper *rankingScraper) Start(database Database, year int32, eventCode string, blueAllianceConfig string) {
+ scraper.doneChan = make(chan bool, 1)
+ scraper.checkStopped = make(chan bool, 1)
+
+ go func(database Database, year int32, eventCode string) {
+ // Backdate the start time by 11 minutes so that getRankings() runs immediately when Start() is called.
+ startTime := time.Now().Add(-11 * time.Minute)
+ for {
+ curTime := time.Now()
+ diff := curTime.Sub(startTime)
+
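+ // Scrape at most once every 10 minutes.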
+ if diff.Minutes() > 10 {
+ getRankings(database, year, eventCode, blueAllianceConfig)
+ startTime = curTime
+ }
+
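+ // A value on doneChan means Stop() was called; exit the loop.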
+ if len(scraper.doneChan) != 0 {
+ break
+ }
+
+ time.Sleep(time.Second)
+ }
+
+ scraper.checkStopped <- true
+ }(database, year, eventCode)
+}
+
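+// Stop tells the scraping goroutine to exit and waits for it to acknowledge
+// before closing both channels.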
+func (scraper *rankingScraper) Stop() {
+ scraper.doneChan <- true
+
+ <-scraper.checkStopped
+ close(scraper.doneChan)
+ close(scraper.checkStopped)
+}
diff --git a/scouting/webserver/rankings/rankings_test.go b/scouting/webserver/rankings/rankings_test.go
new file mode 100644
index 0000000..f47c0a2
--- /dev/null
+++ b/scouting/webserver/rankings/rankings_test.go
@@ -0,0 +1,69 @@
+package rankings
+
+import (
+ "github.com/frc971/971-Robot-Code/scouting/db"
+ "github.com/frc971/971-Robot-Code/scouting/webserver/server"
+ "net/http"
+ "reflect"
+ "testing"
+ "time"
+)
+
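+// MockDatabase records every ranking the scraper submits so the test can
+// inspect them. A mutex guards the slice because the scraper appends to it
+// from its own goroutine.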
+type MockDatabase struct {
+ mutex sync.Mutex
+ rankings []db.Ranking
+}
+
+func (database *MockDatabase) AddOrUpdateRankings(data db.Ranking) error {
+ database.mutex.Lock()
+ defer database.mutex.Unlock()
+ database.rankings = append(database.rankings, data)
+ return nil
+}
+
+func (database *MockDatabase) GetRankings() []db.Ranking {
+ database.mutex.Lock()
+ defer database.mutex.Unlock()
+ return database.rankings
+}
+
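+// ServeRankings rewrites every request path to the canned 2016 "nytr"
+// rankings JSON so the scraper always receives the same test data.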
+func ServeRankings(h http.Handler) http.Handler {
+ fn := func(w http.ResponseWriter, r *http.Request) {
+ r.URL.Path = "/scraping/test_data/2016_nytr_rankings.json"
+
+ h.ServeHTTP(w, r)
+ }
+
+ return http.HandlerFunc(fn)
+}
+
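+// TestGetRankings serves canned TBA ranking data over a local HTTP server,
+// points the scraper at it, and verifies that the scraped rankings end up in
+// the mock database.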
+func TestGetRankings(t *testing.T) {
+ database := MockDatabase{}
+ scraper := rankingScraper{}
+ tbaServer := server.NewScoutingServer()
+ tbaServer.Handle("/", ServeRankings(http.FileServer(http.Dir("../../"))))
+ tbaServer.Start(8000)
+ defer tbaServer.Stop()
+
+ scraper.Start(&database, 2016, "nytr", "scouting_test_config.json")
+ defer scraper.Stop()
+
+ // The test data contains 36 rankings; wait until the scraper's background
+ // goroutine has delivered all of them to the mock database.
+ for {
+ if len(database.GetRankings()) >= 36 {
+ break
+ }
+
+ time.Sleep(time.Second)
+ }
+
+ beginningThreeExpected := []db.Ranking{
+ {TeamNumber: 359, Losses: 1, Wins: 11, Ties: 0, Rank: 1, Dq: 0},
+ {TeamNumber: 5254, Losses: 1, Wins: 11, Ties: 0, Rank: 2, Dq: 0},
+ {TeamNumber: 3990, Losses: 1, Wins: 11, Ties: 0, Rank: 3, Dq: 0},
+ }
+
+ endThreeExpected := []db.Ranking{
+ {TeamNumber: 5943, Losses: 10, Wins: 2, Ties: 0, Rank: 34, Dq: 0},
+ {TeamNumber: 4203, Losses: 10, Wins: 2, Ties: 0, Rank: 35, Dq: 0},
+ {TeamNumber: 5149, Losses: 10, Wins: 2, Ties: 0, Rank: 36, Dq: 0},
+ }
+
+ gotRankings := database.GetRankings()
+
+ if !reflect.DeepEqual(beginningThreeExpected, gotRankings[0:3]) {
+ t.Fatalf("Got %#v, but expected %#v.", gotRankings[0:3], beginningThreeExpected)
+ }
+
+ if !reflect.DeepEqual(endThreeExpected, gotRankings[33:]) {
+ t.Fatalf("Got %#v, but expected %#v.", gotRankings[33:], endThreeExpected)
+ }
+}
diff --git a/scouting/webserver/rankings/scouting_test_config.json b/scouting/webserver/rankings/scouting_test_config.json
new file mode 100644
index 0000000..19a1b4f
--- /dev/null
+++ b/scouting/webserver/rankings/scouting_test_config.json
@@ -0,0 +1,4 @@
+{
+ "api_key": "dummy_key_that_is_not_actually_used_in_this_test",
+ "base_url": "http://localhost:8000"
+}
\ No newline at end of file