package static

// A year-agnostic way to serve static HTTP files.
import (
	"crypto/sha256"
	"errors"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/frc971/971-Robot-Code/scouting/webserver/server"
)

// We want the static files (which include JS that is modified over time)
// to not be cached. This ensures that users get the updated versions when
// they are uploaded to the server.
// Based on https://stackoverflow.com/a/33881296, this disables caching for
// most browsers.
var epoch = time.Unix(0, 0).Format(time.RFC1123)

var noCacheHeaders = map[string]string{
	"Expires":         epoch,
	"Cache-Control":   "no-cache, private, max-age=0",
	"Pragma":          "no-cache",
	"X-Accel-Expires": "0",
}

// MaybeNoCache wraps h and sets the no-cache headers on responses for the
// entry points ("/" and "/index.html") so that browsers always re-fetch
// them. All other requests are served unchanged.
func MaybeNoCache(h http.Handler) http.Handler {
	fn := func(w http.ResponseWriter, r *http.Request) {
		// We force the browser not to cache index.html so that
		// browsers will notice when the bundle gets updated.
		if r.URL.Path == "/" || r.URL.Path == "/index.html" {
			for k, v := range noCacheHeaders {
				w.Header().Set(k, v)
			}
		}

		h.ServeHTTP(w, r)
	}

	return http.HandlerFunc(fn)
}

// computeSha256 computes the SHA-256 checksum of the specified file and
// returns it as a lowercase hex string.
func computeSha256(path string) (string, error) {
	file, err := os.Open(path)
	if err != nil {
		return "", errors.New(fmt.Sprint("Failed to open ", path, ": ", err))
	}
	defer file.Close()

	hash := sha256.New()
	if _, err := io.Copy(hash, file); err != nil {
		return "", errors.New(fmt.Sprint("Failed to compute sha256 of ", path, ": ", err))
	}
	return fmt.Sprintf("%x", hash.Sum(nil)), nil
}
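
// Illustrative sketch only (the function name is made up and nothing in
// this package calls it): it shows the digest format that computeSha256
// produces. Hashing the same bytes in memory with sha256.Sum256 yields the
// identical lowercase hex string; for example, the contents "hello" hash to
// "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824".
func exampleSha256OfContents(contents []byte) string {
	// sha256.Sum256 returns a fixed-size [32]byte array; slice it so %x
	// formats the raw bytes as hex.
	sum := sha256.Sum256(contents)
	return fmt.Sprintf("%x", sum[:])
}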

// findAllFileShas finds the checksums for all the files in the specified
// directory. This is a best effort only. If for some reason we fail to
// compute the checksum of something, we just move on. The returned map is
// keyed by checksum; the values are the corresponding paths relative to
// the specified directory.
func findAllFileShas(directory string) map[string]string {
	shaSums := make(map[string]string)

	// Find the checksums for all the files.
	err := filepath.Walk(directory, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			log.Println("Walk() didn't want to deal with", path, ":", err)
			return nil
		}
		if info.IsDir() {
			// We only care about computing checksums of files.
			// Ignore directories.
			return nil
		}
		hash, err := computeSha256(path)
		if err != nil {
			log.Println(err)
			return nil
		}
		// We want all paths relative to the original search directory.
		// That means we remove the search directory from the Walk()
		// result. Also make sure that the final path doesn't start
		// with a "/" to make it independent of whether "directory"
		// ends with a "/" or not.
		trimmedPath := strings.TrimPrefix(path, directory)
		trimmedPath = strings.TrimPrefix(trimmedPath, "/")
		shaSums[hash] = trimmedPath
		return nil
	})
	if err != nil {
		log.Fatal("Got unexpected error from Walk(): ", err)
	}

	return shaSums
}
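
// Debugging sketch (an assumed helper; nothing in this package calls it):
// logs every checksum -> relative-path mapping that findAllFileShas
// discovers so that /sha256/<checksum>/<path> URLs can be constructed by
// hand when poking at the server.
func logFileShas(directory string) {
	for hash, path := range findAllFileShas(directory) {
		log.Println(hash, "->", path)
	}
}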

// HandleShaUrl serves files from the specified directory in a
// checksum-addressable manner. Requests must look like
// /sha256/<checksum>/<path> where <path> is the file's path relative to
// the directory and <checksum> is the SHA-256 checksum of that file's
// contents.
func HandleShaUrl(directory string, h http.Handler) http.Handler {
	shaSums := findAllFileShas(directory)

	fn := func(w http.ResponseWriter, r *http.Request) {
		// We expect the path portion to look like this:
		// /sha256/<checksum>/path...
		// Splitting on / means we end up with this list:
		// [0] ""
		// [1] "sha256"
		// [2] "<checksum>"
		// [3] path...
		parts := strings.SplitN(r.URL.Path, "/", 4)
		if len(parts) != 4 {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		if parts[0] != "" || parts[1] != "sha256" {
			// Something is fundamentally wrong. We told the
			// framework to only give us /sha256/ requests.
			log.Fatal("This handler should not be called for " + r.URL.Path)
		}
		hash := parts[2]
		if path, ok := shaSums[hash]; ok {
			// The path must match what it would be without the
			// /sha256/<checksum>/ prefix. Otherwise it's too easy
			// to make copy-paste mistakes.
			if path != parts[3] {
				log.Println("Got", parts[3], "expected", path)
				w.WriteHeader(http.StatusBadRequest)
				return
			}
			// We found a file with this checksum. Serve that file.
			r.URL.Path = path
		} else {
			// No file with this checksum found.
			w.WriteHeader(http.StatusNotFound)
			return
		}

		h.ServeHTTP(w, r)
	}

	return http.HandlerFunc(fn)
}
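
// Minimal sketch of the path parsing that HandleShaUrl relies on (the
// function name is made up for illustration; nothing calls it). For a
// request path like "/sha256/<checksum>/foo/bar.js", strings.SplitN with a
// limit of 4 yields ["", "sha256", "<checksum>", "foo/bar.js"], so the
// checksum and the remainder of the path come back as separate values.
func parseShaUrl(urlPath string) (hash string, path string, ok bool) {
	parts := strings.SplitN(urlPath, "/", 4)
	if len(parts) != 4 || parts[0] != "" || parts[1] != "sha256" {
		return "", "", false
	}
	return parts[2], parts[3], true
}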

// ServePages serves the pages in the specified directory.
func ServePages(scoutingServer server.ScoutingServer, directory string) {
	// Serve the / endpoint given a folder of pages.
	scoutingServer.Handle("/", MaybeNoCache(http.FileServer(http.Dir(directory))))

	// Also serve files in a checksum-addressable manner.
	scoutingServer.Handle("/sha256/", HandleShaUrl(directory, http.FileServer(http.Dir(directory))))
}
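
// Hedged usage sketch (the helper name and the host/port are assumptions
// for illustration; they are not part of this package): once ServePages is
// serving a directory, any file in it can also be fetched through the
// checksum-addressed route, whose URL only changes when the file's
// contents change.
func exampleFetchByChecksum(hash string, relativePath string) (*http.Response, error) {
	// "localhost:8080" is an assumed address; use whatever address the
	// scouting server was actually started on.
	url := fmt.Sprintf("http://localhost:8080/sha256/%s/%s", hash, relativePath)
	return http.Get(url)
}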