scouting: Add an endpoint for populating the match schedule
This patch combines the scraping library with the scouting webserver.
There's now also a new endpoint for the web page (or debug CLI tool)
to ask the server to fetch the match list. The endpoint is
`/requests/refresh_match_list`.
All the tests are updated. The `cli_test` uses a 2016 nytr match
list that I downloaded from TBA. It should be a decent integration
test as it uses representative data.
Signed-off-by: Philipp Schrader <philipp.schrader@gmail.com>
Change-Id: I6c540590521b00887eb2ddde2a9369875c659551
diff --git a/scouting/scraping/scraping_demo.go b/scouting/scraping/scraping_demo.go
index 1d727f3..0ea3e53 100644
--- a/scouting/scraping/scraping_demo.go
+++ b/scouting/scraping/scraping_demo.go
@@ -2,6 +2,9 @@
// To run the demo, ensure that you have a file named scouting_config.json at the workspace root with your TBA api key in it.
import (
+ "encoding/json"
+ "flag"
+ "fmt"
"log"
"github.com/davecgh/go-spew/spew"
@@ -9,12 +12,23 @@
)
func main() {
+ jsonPtr := flag.Bool("json", false, "If set, dump as JSON, rather than Go debug output.")
+ flag.Parse()
+
// Get all the matches.
- matches, err := scraping.AllMatches("2016", "nytr", "")
- // Fail on error.
+ matches, err := scraping.AllMatches(2016, "nytr", "")
if err != nil {
- log.Fatal("Error:", err.Error)
+ log.Fatal("Failed to scrape match list: ", err)
}
+
// Dump the matches.
- spew.Dump(matches)
+ if *jsonPtr {
+ jsonData, err := json.MarshalIndent(matches, "", " ")
+ if err != nil {
+ log.Fatal("Failed to turn match list into JSON: ", err)
+ }
+ fmt.Println(string(jsonData))
+ } else {
+ spew.Dump(matches)
+ }
}