package static

// A year-agnostic way to serve static HTTP files.
import (
	"crypto/sha256"
	"errors"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
)

// We don't want the static files (which include JS that changes over time)
// to be cached, so that users get the updated versions whenever they're
// uploaded to the server. Based on https://stackoverflow.com/a/33881296,
// these headers disable caching in most browsers.
var epoch = time.Unix(0, 0).Format(time.RFC1123)

var noCacheHeaders = map[string]string{
	"Expires":         epoch,
	"Cache-Control":   "no-cache, private, max-age=0",
	"Pragma":          "no-cache",
	"X-Accel-Expires": "0",
}

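// Wraps the given handler and, for the "/" and "/index.html" entry points
// only, adds the no-cache headers defined above so that browsers always
// re-fetch them. All other paths are served with the handler's default
// caching behavior.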
func MaybeNoCache(h http.Handler) http.Handler {
	fn := func(w http.ResponseWriter, r *http.Request) {
		// We force the browser not to cache index.html so that
		// browsers will notice when the bundle gets updated.
		if r.URL.Path == "/" || r.URL.Path == "/index.html" {
			for k, v := range noCacheHeaders {
				w.Header().Set(k, v)
			}
		}

		h.ServeHTTP(w, r)
	}

	return http.HandlerFunc(fn)
}

// Computes the sha256 of the specified file.
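// The checksum is returned as a lowercase hex string, the same form that the
// sha256sum command-line tool prints.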
func computeSha256(path string) (string, error) {
	file, err := os.Open(path)
	if err != nil {
		return "", errors.New(fmt.Sprint("Failed to open ", path, ": ", err))
	}
	defer file.Close()

	hash := sha256.New()
	if _, err := io.Copy(hash, file); err != nil {
		return "", errors.New(fmt.Sprint("Failed to compute sha256 of ", path, ": ", err))
	}
	return fmt.Sprintf("%x", hash.Sum(nil)), nil
}

// Finds the checksums for all the files in the specified directory. This is
// best effort only: if for some reason we fail to compute the checksum of
// something, we just move on.
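//
// The returned map goes from checksum to the file's path relative to the
// directory (with a leading slash). For example, a hypothetical main.js at
// the top of the directory would end up as an entry roughly like
// "ab12..." -> "/main.js".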
func findAllFileShas(directory string) map[string]string {
	shaSums := make(map[string]string)

	// Find the checksums for all the files.
	err := filepath.Walk(directory, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			log.Println("Walk() didn't want to deal with ", path, ":", err)
			return nil
		}
		if info.IsDir() {
			// We only care about computing checksums of files.
			// Ignore directories.
			return nil
		}
		hash, err := computeSha256(path)
		if err != nil {
			log.Println(err)
			return nil
		}
		shaSums[hash] = "/" + strings.TrimPrefix(path, directory)
		return nil
	})
	if err != nil {
		log.Fatal("Got unexpected error from Walk(): ", err)
	}

	return shaSums
}

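// Serves the files in the specified directory in a checksum-addressable
// manner: a request for /sha256/<checksum>/<path> is answered with the file
// whose sha256 matches <checksum>. The trailing <path> is required, but the
// file is looked up purely by its checksum, so the contents behind a given
// URL never change and can be cached aggressively. Unknown checksums get a
// 404.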
func HandleShaUrl(directory string, h http.Handler) http.Handler {
	shaSums := findAllFileShas(directory)

	fn := func(w http.ResponseWriter, r *http.Request) {
		// We expect the path portion to look like this:
		// /sha256/<checksum>/path...
		// Splitting on / means we end up with this list:
		// [0] ""
		// [1] "sha256"
		// [2] "<checksum>"
		// [3-] path...
		parts := strings.Split(r.URL.Path, "/")
		if len(parts) < 4 {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		if parts[0] != "" || parts[1] != "sha256" {
			// Something is fundamentally wrong. We told the
			// framework to only give us /sha256/ requests.
			log.Fatal("This handler should not be called for " + r.URL.Path)
		}
		hash := parts[2]
		if path, ok := shaSums[hash]; ok {
			// We found a file with this checksum. Serve that file.
			r.URL.Path = path
		} else {
			// No file with this checksum found.
			w.WriteHeader(http.StatusNotFound)
			return
		}

		h.ServeHTTP(w, r)
	}

	return http.HandlerFunc(fn)
}

// Serve pages in the specified directory.
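//
// A rough usage sketch from the caller's side (NewScoutingServer and Start
// are assumptions for illustration; they are not defined in this file):
//
//	scoutingServer := server.NewScoutingServer()
//	static.ServePages(scoutingServer, "static_files")
//	scoutingServer.Start(8080)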
func ServePages(scoutingServer server.ScoutingServer, directory string) {
	// Serve the / endpoint given a folder of pages.
	scoutingServer.Handle("/", MaybeNoCache(http.FileServer(http.Dir(directory))))

	// Also serve files in a checksum-addressable manner.
	scoutingServer.Handle("/sha256/", HandleShaUrl(directory, http.FileServer(http.Dir(directory))))
}