Add infrastructure for pre-compiling Python wheels

This patch makes it so `rules_python` uses our FRC971 mirror for
downloading wheels instead of reaching out to pypi.org. This also
means that `rules_python` will not build a wheel from scratch.

Instead, we now have a new script that will build wheels in as
reproducible a way as possible and then upload them to our internal
mirror.

Run with:

    $ bazel run //tools/python:mirror_pip_packages --config=k8_upstream_python -- --ssh_host software

This patch does not move the entire repo to use this new setup yet. I
still need to take inventory of all the pip packages we need.

Buildkite will enforce that wheels are uploaded and available on our
mirror for all pip dependencies.

Signed-off-by: Philipp Schrader <philipp.schrader@gmail.com>
Change-Id: I12a1a29026a13211ea279b5567ed70529f0c66b5
diff --git a/tools/python/BUILD b/tools/python/BUILD
index 37101f5..1fa7a55 100644
--- a/tools/python/BUILD
+++ b/tools/python/BUILD
@@ -75,3 +75,12 @@
     toolchain = ":upstream_py_runtime",
     toolchain_type = "@rules_python//python:toolchain_type",
 )
+
+py_binary(
+    name = "mirror_pip_packages",
+    srcs = ["mirror_pip_packages.py"],
+    deps = [
+        "@pip//pkginfo",
+        "@pip//requests",
+    ],
+)
diff --git a/tools/python/README.md b/tools/python/README.md
new file mode 100644
index 0000000..cf878f7
--- /dev/null
+++ b/tools/python/README.md
@@ -0,0 +1,56 @@
+FRC971's Python setup
+================================================================================
+
+How to depend on pip packages
+--------------------------------------------------------------------------------
+You can only depend on pip packages that are listed in our
+[requirements file][requirements_file]. Any package you see in there can be
+depended on.
+
+For example, depend on `numpy` like so:
+```python
+py_binary(
+    name = "bin",
+    srcs = ["bin.py"],
+    deps = [
+        "@pip//numpy",
+    ],
+)
+```
+
+The labels are "normalized". That means the entries in the [requirements
+file][requirements_file] may not be usable as-is. When you know the name of the
+package, apply the following transformations:
+
+1. Make the name lower-case.
+2. Replace all dots and dashes with underscores.
+
+The following are examples to showcase the various rules:
+
+* `Jinja2` becomes `@pip//jinja2`.
+* `absl-py` becomes `@pip//absl_py`.
+* `Flask-SQLAlchemy` becomes `@pip//flask_sqlalchemy`.
+* `ruamel.yaml` becomes `@pip//ruamel_yaml`.
+
+
+How to add new pip packages
+--------------------------------------------------------------------------------
+
+1. Add the new package you're interested in to `tools/python/requirements.txt`.
+2. Run the lock file generation script.
+
+        bazel run //tools/python:requirements.update
+
+
+How to make Buildkite happy with new pip packages
+--------------------------------------------------------------------------------
+In order for Buildkite to be able to use new pip packages, they have to be
+mirrored on frc971 infrastructure.
+
+1. Follow the above procedure for adding new pip packages if not already done.
+2. Run the mirroring script.
+
+        bazel run //tools/python:mirror_pip_packages --config=k8_upstream_python -- --ssh_host <software>
+
+    where `<software>` is the `ssh(1)` target for reaching the server that hosts
+    the FRC971 mirror.
diff --git a/tools/python/generate_pip_packages_in_docker.sh b/tools/python/generate_pip_packages_in_docker.sh
new file mode 100755
index 0000000..77cd068
--- /dev/null
+++ b/tools/python/generate_pip_packages_in_docker.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+# This script runs inside of a docker container to download the wheels in our
+# requirements lock file. If necessary, this script will also build wheels for
+# packages that are only available in source form. If a wheel is built from
+# source, it will be made more hermetic with the "auditwheel" tool. That tool
+# grafts system libraries into the wheel itself as per PEP600.
+#
+# This file is largely inspired by the manylinux demo:
+# https://github.com/pypa/python-manylinux-demo/blob/master/travis/build-wheels.sh
+#
+# This file is more complicated than the demo largely because of a bug in the
+# "auditwheel" tool. It can't deal with already-fixed packages. Once that's
+# fixed, I think we can simplify this script quite a bit.
+# https://github.com/pypa/auditwheel/issues/394
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+readonly PLAT="$1"
+readonly ARCH="$2"
+readonly PYTHON_VERSION="$3"
+readonly CALLER_ID="$4"
+
+readonly PYTHON_BIN="/opt/python/cp${PYTHON_VERSION}-cp${PYTHON_VERSION}/bin/python3"
+
+# Try to make the wheels reproducible by telling them we're in 1980.
+# Unfortunately, this is insufficient due to a pip bug.
+# https://github.com/pypa/pip/issues/9604
+export SOURCE_DATE_EPOCH=315532800
+
+SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"
+readonly SCRIPT_DIR
+
+clean_up() {
+  chown -R "${CALLER_ID}:${CALLER_ID}" "${SCRIPT_DIR}"
+}
+
+trap clean_up EXIT
+
+rm -rf \
+  "${SCRIPT_DIR}"/venv \
+  "${SCRIPT_DIR}"/wheelhouse_tmp \
+  "${SCRIPT_DIR}"/wheelhouse
+
+mkdir "${SCRIPT_DIR}"/venv
+pushd "${SCRIPT_DIR}"/venv
+
+"${PYTHON_BIN}" -m venv venv
+
+source venv/bin/activate
+
+readonly -a PIP_BIN=("${PYTHON_BIN}" -m pip)
+
+# Might be useful for debugging.
+"${PIP_BIN[@]}" --version
+
+mkdir "${SCRIPT_DIR}"/wheelhouse
+
+# Get wheels for everything. Everything is stored in a temporary wheelhouse in
+# case we need to run the "auditwheel" tool against them.
+"${PIP_BIN[@]}" wheel \
+  --no-deps \
+  -r "${SCRIPT_DIR}/requirements.lock.txt" \
+  -w "${SCRIPT_DIR}/wheelhouse_tmp/" \
+  | tee /tmp/pip-wheel.log
+
+# Find the list of packages that were built from source.
+# We need to suppress the exit code of grep here because it returns non-zero if
+# we have no source packages.
+source_packages=($( \
+  (grep -o '^\s\+Building wheel for [-_.a-zA-Z0-9]\+' /tmp/pip-wheel.log || :) \
+  | awk '{print $4}' \
+  | sort -u \
+  ))
+
+# Let the user know which packages we built ourselves.
+echo "The following packages were built from source based on pip's output."
+for package in "${source_packages[@]}"; do
+  echo " - ${package}"
+done
+if ((${#source_packages[@]} == 0)); then
+  echo " (no source packages)"
+fi
+
+# Find the list of actual wheel filenames we built.
+wheels_built_from_source=()
+for package in "${source_packages[@]}"; do
+  # Extract lines that look roughly like so:
+  # Created wheel for orderedset: filename=orderedset-2.0.3-cp39-cp39-linux_x86_64.whl size=382564 sha256=70fd9e3ab45cf737048b757ba219adf11a691963fbb88c9f16f6eef3866239a9
+  log_line="$(grep -o "Created wheel for ${package}: filename=[^ ]\\+" /tmp/pip-wheel.log)"
+  filename="$(cut -d= -f2 <<<"${log_line}")"
+  wheels_built_from_source+=("${filename}")
+done
+
+# Make the wheels we built more hermetic. The auditwheel tool will graft system
+# libraries into the wheel itself. The list of system libraries that will not
+# get grafted is here:
+# https://peps.python.org/pep-0599/#the-manylinux2014-policy
+for wheel in "${wheels_built_from_source[@]}"; do
+  wheel_path="${SCRIPT_DIR}/wheelhouse_tmp/${wheel}"
+  echo "Repairing wheel ${wheel}"
+  if ! auditwheel show "${wheel_path}"; then
+    echo "Assuming ${wheel} is a non-platform wheel. Skipping."
+    continue
+  fi
+  auditwheel repair \
+    --plat "${PLAT}_${ARCH}" \
+    --only-plat \
+    -w "${SCRIPT_DIR}"/wheelhouse/ \
+    "${wheel_path}"
+done
+
+# Copy the downloaded wheels into the final wheelhouse too.
+downloaded_wheels=($(grep '^Saved [^ ]\+\.whl$' /tmp/pip-wheel.log \
+  | awk '{print $2}'))
+cp "${downloaded_wheels[@]}" "${SCRIPT_DIR}"/wheelhouse/
diff --git a/tools/python/mirror_pip_packages.py b/tools/python/mirror_pip_packages.py
new file mode 100644
index 0000000..2b18406
--- /dev/null
+++ b/tools/python/mirror_pip_packages.py
@@ -0,0 +1,223 @@
+"""This script mirrors our pip package dependencies.
+
+This script looks at the requirements.lock.txt file and generates a wheel for
+each entry. Those wheels are then mirrored.
+
+See tools/python/README.md for some more information.
+"""
+
+import argparse
+import hashlib
+import json
+import os
+import pwd
+import subprocess
+import sys
+import tarfile
+from pathlib import Path
+from typing import List, Optional, Tuple
+
+import requests
+from pkginfo import Wheel
+
+PYTHON_VERSION = 39
+PLAT = "manylinux_2_28"
+ARCH = "x86_64"
+WHEELHOUSE_MIRROR_URL = "https://software.frc971.org/Build-Dependencies/wheelhouse"
+PY_DEPS_WWWW_DIR = "/var/www/html/files/frc971/Build-Dependencies/wheelhouse"
+
+
+def compute_sha256(data: bytes) -> str:
+    """Computes the sha256 checksum of a bytes sequence.
+
+    Args:
+        data: The bytes to checksum.
+
+    Returns:
+        The hex representation of the checksum.
+    """
+    hasher = hashlib.sha256()
+    hasher.update(data)
+    return hasher.hexdigest()
+
+
+def compute_file_sha256(filename: Path) -> str:
+    """Computes the sha256 checksum of the content of a file.
+
+    Args:
+        filename: The file to checksum.
+
+    Returns:
+        The hex representation of the checksum.
+    """
+    return compute_sha256(filename.read_bytes())
+
+
+def search_for_uploaded_wheel(wheel: Path, wheel_url: str) -> Tuple[bool, str]:
+    """Searches for this wheel on our internal mirror.
+
+    Since we can't build wheels reproducibly, our best option is to check
+    whether this wheel already exists on the mirror. If it does, we can skip
+    uploading it.
+
+    Args:
+        wheel: The wheel to search for on the mirror.
+        wheel_url: The URL where the wheel is expected if it exists on the mirror.
+
+    Returns:
+        A two-tuple. The first value is a boolean that signifies whether the
+        wheel was found on the mirror. The second value is a string. If the
+        wheel was not found on the mirror, this is an empty string. Otherwise,
+        this string contains the sha256 checksum of the wheel found on the
+        mirror.
+    """
+    # TODO(phil): A better way to do this would be to SSH into the host and
+    # look for files on the filesystem.
+    request = requests.get(wheel_url)
+
+    if request.status_code == 200:
+        return True, compute_sha256(request.content)
+    if request.status_code == 404:
+        return False, ""
+
+    raise RuntimeError(
+        f"Don't know what to do with status code {request.status_cdoe} when trying to get {wheel_url}"
+    )
+
+
+def copy_to_host_and_unpack(filename: str, ssh_host: str) -> None:
+    """Copies the tarball of wheels to the server and unpacks the tarball.
+
+    Args:
+        filename: The path to the tarball to be uploaded.
+        ssh_host: The server that will be passed to ssh(1) for uploading and
+            unpacking the tarball.
+    """
+    # TODO(phil): De-duplicate with tools/go/mirror_go_repos.py
+
+    subprocess.run(["scp", filename, f"{ssh_host}:"], check=True)
+
+    # Be careful not to use single quotes in these commands to avoid breaking
+    # the subprocess.run() invocation below.
+    command = " && ".join([
+        f"mkdir -p {PY_DEPS_WWWW_DIR}",
+        f"tar -C {PY_DEPS_WWWW_DIR} --no-same-owner -xvaf {filename.name}",
+        # Change the permissions so other users can read them (and checksum
+        # them).
+        f"find {PY_DEPS_WWWW_DIR}/ -type f -exec chmod 644 {{}} +",
+    ])
+
+    print("You might be asked for your sudo password shortly.")
+    subprocess.run(
+        ["ssh", "-t", ssh_host, f"sudo -u www-data bash -c '{command}'"],
+        check=True)
+
+
+def main(argv: List[str]) -> Optional[int]:
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-f",
+        "--force",
+        action="store_true",
+        help=("If set, ignores packages we have already uploaded and "
+              "possibly overwrite them with the just-built ones. Use with "
+              "extreme caution! This may easily cause issues with building "
+              "older commits. Use this only if you know what you're doing."))
+    parser.add_argument(
+        "--ssh_host",
+        type=str,
+        help=("The SSH host to copy the downloaded Go repositories to. This "
+              "should be software.971spartans.net where all the "
+              "Build-Dependencies files live. Only specify this if you have "
+              "access to the server."))
+    args = parser.parse_args(argv[1:])
+
+    root_dir = Path(os.environ["BUILD_WORKSPACE_DIRECTORY"])
+    caller = os.getenv("SUDO_USER") or os.environ["USER"]
+    caller_id = pwd.getpwnam(caller).pw_uid
+
+    python_dir = root_dir / "tools" / "python"
+
+    # Run the wheel generation script inside the docker container provided by
+    # the pypa/manylinux project.
+    # https://github.com/pypa/manylinux/
+    subprocess.run([
+        "docker",
+        "run",
+        "-it",
+        "-v",
+        f"{python_dir}:/opt/971_build/",
+        f"quay.io/pypa/{PLAT}_{ARCH}",
+        "/opt/971_build/generate_pip_packages_in_docker.sh",
+        PLAT,
+        ARCH,
+        str(PYTHON_VERSION),
+        str(caller_id),
+    ],
+                   check=True)
+
+    # Get the list of wheels we downloaded form pypi.org or built ourselves.
+    wheelhouse = python_dir / "wheelhouse"
+    wheels = wheelhouse.glob("*.whl")
+
+    # Assemble the override list. This list will tell rules_python to download
+    # from our mirror instead of pypi.org.
+    wheels_to_be_uploaded = []
+    override_information = {}
+    for wheel in sorted(wheels):
+        wheel_url = f"{WHEELHOUSE_MIRROR_URL}/{wheel.name}"
+        sha256 = compute_file_sha256(wheel)
+
+        # Check if we already have the wheel uploaded. If so, download that one
+        # into the wheelhouse. This lets us avoid non-reproducibility with pip
+        # and native extensions.
+        # https://github.com/pypa/pip/issues/9604
+        wheel_found, sha256_on_mirror = search_for_uploaded_wheel(
+            wheel, wheel_url)
+
+        if args.force:
+            if wheel_found and sha256 != sha256_on_mirror:
+                print(
+                    f"WARNING: The next upload wheel change sha256 for {wheel}!"
+                )
+            wheels_to_be_uploaded.append(wheel)
+        else:
+            if wheel_found:
+                sha256 = sha256_on_mirror
+            else:
+                wheels_to_be_uploaded.append(wheel)
+
+        # Update the override information for this wheel.
+        # We use lower-case for the package names here because that's what the
+        # requirements.lock.txt file uses.
+        info = Wheel(wheel)
+        override_information[f"{info.name.lower()}=={info.version}"] = {
+            "url": wheel_url,
+            "sha256": sha256,
+        }
+
+    print(f"We need to upload {len(wheels_to_be_uploaded)} wheels:")
+    for wheel in wheels_to_be_uploaded:
+        print(wheel)
+
+    # Create a tarball of all the wheels that need to be mirrored.
+    py_deps_tar = root_dir / "py_deps.tar"
+    with tarfile.open(py_deps_tar, "w") as tar:
+        for wheel in wheels_to_be_uploaded:
+            tar.add(wheel, arcname=wheel.name)
+
+    # Upload the wheels if requested.
+    if wheels_to_be_uploaded and args.ssh_host:
+        copy_to_host_and_unpack(py_deps_tar, args.ssh_host)
+    else:
+        print("Skipping mirroring because of lack of --ssh_host or there's "
+              "nothing to actually mirror.")
+
+    # Write out the overrides file.
+    override_file = python_dir / "whl_overrides.json"
+    override_file.write_text(
+        json.dumps(override_information, indent=4, sort_keys=True) + "\n")
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))
diff --git a/tools/python/requirements.lock.txt b/tools/python/requirements.lock.txt
index 337eaf8..ba7e2d7 100644
--- a/tools/python/requirements.lock.txt
+++ b/tools/python/requirements.lock.txt
@@ -4,6 +4,14 @@
 #
 #    bazel run //tools/python:requirements.update
 #
+certifi==2022.9.14 \
+    --hash=sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5 \
+    --hash=sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516
+    # via requests
+charset-normalizer==2.1.1 \
+    --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
+    --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
+    # via requests
 cycler==0.11.0 \
     --hash=sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3 \
     --hash=sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f
@@ -12,6 +20,10 @@
     --hash=sha256:545c05d0f7903a863c2020e07b8f0a57517f2c40d940bded77076397872d14ca \
     --hash=sha256:edf251d5d2cc0580d5f72de4621c338d8c66c5f61abb50cf486640f73c8194d5
     # via matplotlib
+idna==3.4 \
+    --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
+    --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
+    # via requests
 kiwisolver==1.3.2 \
     --hash=sha256:0007840186bacfaa0aba4466d5890334ea5938e0bb7e28078a0eb0e63b5b59d5 \
     --hash=sha256:19554bd8d54cf41139f376753af1a644b63c9ca93f8f72009d50a2080f870f77 \
@@ -177,6 +189,10 @@
     --hash=sha256:e3dacecfbeec9a33e932f00c6cd7996e62f53ad46fbe677577394aaa90ee419a \
     --hash=sha256:eb9fc393f3c61f9054e1ed26e6fe912c7321af2f41ff49d3f83d05bacf22cc78
     # via matplotlib
+pkginfo==1.8.3 \
+    --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \
+    --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c
+    # via -r tools/python/requirements.txt
 pyparsing==3.0.6 \
     --hash=sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4 \
     --hash=sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81
@@ -187,6 +203,10 @@
     --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
     --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
     # via matplotlib
+requests==2.28.1 \
+    --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \
+    --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349
+    # via -r tools/python/requirements.txt
 scipy==1.7.3 \
     --hash=sha256:033ce76ed4e9f62923e1f8124f7e2b0800db533828c853b402c7eec6e9465d80 \
     --hash=sha256:173308efba2270dcd61cd45a30dfded6ec0085b4b6eb33b5eb11ab443005e088 \
@@ -222,3 +242,7 @@
     --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
     --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
     # via python-dateutil
+urllib3==1.26.12 \
+    --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
+    --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
+    # via requests
diff --git a/tools/python/requirements.txt b/tools/python/requirements.txt
index 7bb0cc1..5d0f583 100644
--- a/tools/python/requirements.txt
+++ b/tools/python/requirements.txt
@@ -1,5 +1,8 @@
 # After updating this file, run:
 # $ bazel run //tools/python:requirements.update
+
 matplotlib
 numpy
+pkginfo
+requests
 scipy
diff --git a/tools/python/whl_overrides.json b/tools/python/whl_overrides.json
new file mode 100644
index 0000000..9499780
--- /dev/null
+++ b/tools/python/whl_overrides.json
@@ -0,0 +1,70 @@
+{
+    "certifi==2022.9.14": {
+        "sha256": "e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/certifi-2022.9.14-py3-none-any.whl"
+    },
+    "charset-normalizer==2.1.1": {
+        "sha256": "83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/charset_normalizer-2.1.1-py3-none-any.whl"
+    },
+    "cycler==0.11.0": {
+        "sha256": "3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/cycler-0.11.0-py3-none-any.whl"
+    },
+    "fonttools==4.28.5": {
+        "sha256": "edf251d5d2cc0580d5f72de4621c338d8c66c5f61abb50cf486640f73c8194d5",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/fonttools-4.28.5-py3-none-any.whl"
+    },
+    "idna==3.4": {
+        "sha256": "90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/idna-3.4-py3-none-any.whl"
+    },
+    "kiwisolver==1.3.2": {
+        "sha256": "30fa008c172355c7768159983a7270cb23838c4d7db73d6c0f6b60dde0d432c6",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/kiwisolver-1.3.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl"
+    },
+    "matplotlib==3.5.1": {
+        "sha256": "87900c67c0f1728e6db17c6809ec05c025c6624dcf96a8020326ea15378fe8e7",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/matplotlib-3.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl"
+    },
+    "numpy==1.21.5": {
+        "sha256": "c293d3c0321996cd8ffe84215ffe5d269fd9d1d12c6f4ffe2b597a7c30d3e593",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/numpy-1.21.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl"
+    },
+    "packaging==21.3": {
+        "sha256": "ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/packaging-21.3-py3-none-any.whl"
+    },
+    "pillow==8.4.0": {
+        "sha256": "b8831cb7332eda5dc89b21a7bce7ef6ad305548820595033a4b03cf3091235ed",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/Pillow-8.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+    },
+    "pkginfo==1.8.3": {
+        "sha256": "848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/pkginfo-1.8.3-py2.py3-none-any.whl"
+    },
+    "pyparsing==3.0.6": {
+        "sha256": "04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/pyparsing-3.0.6-py3-none-any.whl"
+    },
+    "python-dateutil==2.8.2": {
+        "sha256": "961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/python_dateutil-2.8.2-py2.py3-none-any.whl"
+    },
+    "requests==2.28.1": {
+        "sha256": "8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/requests-2.28.1-py3-none-any.whl"
+    },
+    "scipy==1.7.3": {
+        "sha256": "5d1cc2c19afe3b5a546ede7e6a44ce1ff52e443d12b231823268019f608b9b12",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/scipy-1.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+    },
+    "six==1.16.0": {
+        "sha256": "8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/six-1.16.0-py2.py3-none-any.whl"
+    },
+    "urllib3==1.26.12": {
+        "sha256": "b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997",
+        "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/urllib3-1.26.12-py2.py3-none-any.whl"
+    }
+}