blob: 6f8af3b46b6c1c3a22ab74440a46f390fb6c3c3c [file] [log] [blame]
Yash Chainani87d06442022-04-16 20:59:47 -07001package rankings
2
3import (
4 "github.com/frc971/971-Robot-Code/scouting/db"
Philipp Schraderc49eaf72023-02-26 16:56:52 -08005 "github.com/frc971/971-Robot-Code/scouting/scraping/background"
Yash Chainani87d06442022-04-16 20:59:47 -07006 "github.com/frc971/971-Robot-Code/scouting/webserver/server"
7 "net/http"
8 "reflect"
Philipp Schraderc49eaf72023-02-26 16:56:52 -08009 "strings"
Yash Chainani87d06442022-04-16 20:59:47 -070010 "testing"
11 "time"
12)
13
// MockDatabase is a minimal in-memory stand-in for the scouting database.
// It records every ranking passed to AddOrUpdateRankings so tests can
// inspect what the scraper wrote.
type MockDatabase struct {
	// rankings accumulates entries in the order they were added.
	rankings []db.Ranking
}
17
18func (database *MockDatabase) AddOrUpdateRankings(data db.Ranking) error {
19 database.rankings = append(database.rankings, data)
20 return nil
21}
22
Philipp Schraderc49eaf72023-02-26 16:56:52 -080023func ServeRankings(t *testing.T, h http.Handler) http.Handler {
Yash Chainani87d06442022-04-16 20:59:47 -070024 fn := func(w http.ResponseWriter, r *http.Request) {
Philipp Schraderc49eaf72023-02-26 16:56:52 -080025 // Make sure that the rankings are requested properly.
26 if !strings.HasSuffix(r.URL.Path, "/2016nytr/rankings") {
27 t.Error("Got unexpected URL: ", r.URL.Path)
28 }
29
Yash Chainani87d06442022-04-16 20:59:47 -070030 r.URL.Path = "scraping/test_data/2016_nytr_rankings.json"
31
32 h.ServeHTTP(w, r)
33 }
34
35 return http.HandlerFunc(fn)
36}
37
38func TestGetRankings(t *testing.T) {
39 database := MockDatabase{}
Philipp Schraderc49eaf72023-02-26 16:56:52 -080040 scraper := background.BackgroundScraper{}
Yash Chainani87d06442022-04-16 20:59:47 -070041 tbaServer := server.NewScoutingServer()
Philipp Schraderc49eaf72023-02-26 16:56:52 -080042 tbaServer.Handle("/", ServeRankings(t, http.FileServer(http.Dir("../../"))))
Yash Chainani87d06442022-04-16 20:59:47 -070043 tbaServer.Start(8000)
44 defer tbaServer.Stop()
45
Philipp Schraderc49eaf72023-02-26 16:56:52 -080046 scraper.Start(func() {
47 GetRankings(&database, 0, "", "scouting_test_config.json")
48 })
Yash Chainani87d06442022-04-16 20:59:47 -070049 defer scraper.Stop()
50
51 for {
52 if len(database.rankings) > 0 {
53 break
54 }
55
56 time.Sleep(time.Second)
57 }
58
59 beginningThreeExpected := []db.Ranking{
60 {TeamNumber: 359, Losses: 1, Wins: 11, Ties: 0, Rank: 1, Dq: 0},
61 {TeamNumber: 5254, Losses: 1, Wins: 11, Ties: 0, Rank: 2, Dq: 0},
62 {TeamNumber: 3990, Losses: 1, Wins: 11, Ties: 0, Rank: 3, Dq: 0},
63 }
64
65 endThreeExpected := []db.Ranking{
66 {TeamNumber: 5943, Losses: 10, Wins: 2, Ties: 0, Rank: 34, Dq: 0},
67 {TeamNumber: 4203, Losses: 10, Wins: 2, Ties: 0, Rank: 35, Dq: 0},
68 {TeamNumber: 5149, Losses: 10, Wins: 2, Ties: 0, Rank: 36, Dq: 0},
69 }
70
71 if !reflect.DeepEqual(beginningThreeExpected, database.rankings[0:3]) {
72 t.Fatal("Got %#v, but expected %#v.", database.rankings[0:3], beginningThreeExpected)
73 }
74
75 if !reflect.DeepEqual(endThreeExpected, database.rankings[33:]) {
76 t.Fatal("Got %#v, but expected %#v.", database.rankings[33:], beginningThreeExpected)
77 }
78}