Add infrastructure for pre-compiling Python wheels
This patch makes it so `rules_python` uses our FRC971 mirror for
downloading wheels instead of reaching out to pypi.org. This also
means that `rules_python` will not build a wheel from scratch.
Instead, we now have a new script that will build wheels in as
reproducible a way as possible and then upload them to our internal
mirror.
Run with:
$ bazel run //tools/python:mirror_pip_packages --config=k8_upstream_python -- --ssh_host software
This patch does not move the entire repo to use this new setup yet. I
still need to take inventory of all the pip packages we need.
Buildkite will enforce that wheels are uploaded and available on our
mirror for all pip dependencies.
Signed-off-by: Philipp Schrader <philipp.schrader@gmail.com>
Change-Id: I12a1a29026a13211ea279b5567ed70529f0c66b5
diff --git a/.gitignore b/.gitignore
index f881856..c055f98 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,8 @@
# run "vagrant up".
/vm/.vagrant/
/vm/workspace.vdi
+
+# Ignore temporary files for building Python wheels.
+/tools/python/venv/
+/tools/python/wheelhouse/
+/tools/python/wheelhouse_tmp/
diff --git a/WORKSPACE b/WORKSPACE
index 062fe25..13a8726 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -309,6 +309,8 @@
ci_configure(name = "ci_configure")
+load("@ci_configure//:ci.bzl", "RUNNING_IN_CI")
+
http_archive(
name = "platforms",
sha256 = "2c8d8347427e6bb0ba7cf9f933c08fe2be2b62ff2454546ad852f7bf267aad87",
@@ -403,6 +405,10 @@
http_archive(
name = "rules_python",
+ patch_args = ["-p1"],
+ patches = [
+ "//third_party:rules_python/0001-Support-overriding-individual-packages.patch",
+ ],
sha256 = "b593d13bb43c94ce94b483c2858e53a9b811f6f10e1e0eedc61073bd90e58d9c",
strip_prefix = "rules_python-0.12.0",
url = "https://github.com/bazelbuild/rules_python/archive/refs/tags/0.12.0.tar.gz",
@@ -421,7 +427,9 @@
pip_parse(
name = "pip_deps",
+ overrides = "//tools/python:whl_overrides.json",
python_interpreter_target = python_interpreter,
+ require_overrides = RUNNING_IN_CI,
requirements_lock = "//tools/python:requirements.lock.txt",
)
diff --git a/third_party/rules_python/0001-Support-overriding-individual-packages.patch b/third_party/rules_python/0001-Support-overriding-individual-packages.patch
new file mode 100644
index 0000000..1f5f1d5
--- /dev/null
+++ b/third_party/rules_python/0001-Support-overriding-individual-packages.patch
@@ -0,0 +1,269 @@
+From 843248c52d335f7ed51209cd7eee009a743b3488 Mon Sep 17 00:00:00 2001
+From: Philipp Schrader <philipp.schrader@gmail.com>
+Date: Sun, 11 Sep 2022 22:04:47 -0700
+Subject: [PATCH] Support overriding individual packages
+
+---
+ .../extract_wheels/extract_single_wheel.py | 60 ++++++++++---------
+ .../parse_requirements_to_bzl.py | 42 ++++++++++++-
+ python/pip_install/pip_repository.bzl | 34 +++++++++++
+ 3 files changed, 107 insertions(+), 29 deletions(-)
+
+diff --git a/python/pip_install/extract_wheels/extract_single_wheel.py b/python/pip_install/extract_wheels/extract_single_wheel.py
+index a7cc672..26d4368 100644
+--- a/python/pip_install/extract_wheels/extract_single_wheel.py
++++ b/python/pip_install/extract_wheels/extract_single_wheel.py
+@@ -28,41 +28,47 @@ def main() -> None:
+ type=annotation_from_str_path,
+ help="A json encoded file containing annotations for rendered packages.",
+ )
++ parser.add_argument(
++ "--pre-downloaded",
++ action="store_true",
+        help="If set, skips the pip download step. The .whl file is assumed to be downloaded by bazel.",
++ )
+ arguments.parse_common_args(parser)
+ args = parser.parse_args()
+ deserialized_args = dict(vars(args))
+ arguments.deserialize_structured_args(deserialized_args)
+
+- configure_reproducible_wheels()
++ if not args.pre_downloaded:
++ configure_reproducible_wheels()
+
+- pip_args = (
+- [sys.executable, "-m", "pip"]
+- + (["--isolated"] if args.isolated else [])
+- + ["download" if args.download_only else "wheel", "--no-deps"]
+- + deserialized_args["extra_pip_args"]
+- )
++ pip_args = (
++ [sys.executable, "-m", "pip"]
++ + (["--isolated"] if args.isolated else [])
++ + ["download" if args.download_only else "wheel", "--no-deps"]
++ + deserialized_args["extra_pip_args"]
++ )
+
+- requirement_file = NamedTemporaryFile(mode="wb", delete=False)
+- try:
+- requirement_file.write(args.requirement.encode("utf-8"))
+- requirement_file.flush()
+- # Close the file so pip is allowed to read it when running on Windows.
+- # For more information, see: https://bugs.python.org/issue14243
+- requirement_file.close()
+- # Requirement specific args like --hash can only be passed in a requirements file,
+- # so write our single requirement into a temp file in case it has any of those flags.
+- pip_args.extend(["-r", requirement_file.name])
+-
+- env = os.environ.copy()
+- env.update(deserialized_args["environment"])
+- # Assumes any errors are logged by pip so do nothing. This command will fail if pip fails
+- subprocess.run(pip_args, check=True, env=env)
+- finally:
++ requirement_file = NamedTemporaryFile(mode="wb", delete=False)
+ try:
+- os.unlink(requirement_file.name)
+- except OSError as e:
+- if e.errno != errno.ENOENT:
+- raise
++ requirement_file.write(args.requirement.encode("utf-8"))
++ requirement_file.flush()
++ # Close the file so pip is allowed to read it when running on Windows.
++ # For more information, see: https://bugs.python.org/issue14243
++ requirement_file.close()
++ # Requirement specific args like --hash can only be passed in a requirements file,
++ # so write our single requirement into a temp file in case it has any of those flags.
++ pip_args.extend(["-r", requirement_file.name])
++
++ env = os.environ.copy()
++ env.update(deserialized_args["environment"])
++ # Assumes any errors are logged by pip so do nothing. This command will fail if pip fails
++ subprocess.run(pip_args, check=True, env=env)
++ finally:
++ try:
++ os.unlink(requirement_file.name)
++ except OSError as e:
++ if e.errno != errno.ENOENT:
++ raise
+
+ name, extras_for_pkg = requirements._parse_requirement_for_extra(args.requirement)
+ extras = {name: extras_for_pkg} if extras_for_pkg and name else dict()
+diff --git a/python/pip_install/extract_wheels/parse_requirements_to_bzl.py b/python/pip_install/extract_wheels/parse_requirements_to_bzl.py
+index 5762cf5..07642ca 100644
+--- a/python/pip_install/extract_wheels/parse_requirements_to_bzl.py
++++ b/python/pip_install/extract_wheels/parse_requirements_to_bzl.py
+@@ -4,7 +4,7 @@ import shlex
+ import sys
+ import textwrap
+ from pathlib import Path
+-from typing import Any, Dict, List, TextIO, Tuple
++from typing import Any, Dict, List, Optional, TextIO, Tuple
+
+ from pip._internal.network.session import PipSession
+ from pip._internal.req import constructors
+@@ -81,7 +81,7 @@ def parse_whl_library_args(args: argparse.Namespace) -> Dict[str, Any]:
+ whl_library_args.setdefault("python_interpreter", sys.executable)
+
+ # These arguments are not used by `whl_library`
+- for arg in ("requirements_lock", "requirements_lock_label", "annotations"):
++ for arg in ("requirements_lock", "requirements_lock_label", "annotations", "overrides", "require_overrides"):
+ if arg in whl_library_args:
+ whl_library_args.pop(arg)
+
+@@ -93,6 +93,8 @@ def generate_parsed_requirements_contents(
+ repo_prefix: str,
+ whl_library_args: Dict[str, Any],
+ annotations: Dict[str, str] = dict(),
++ overrides: Optional[Dict[str, Dict[str, str]]] = None,
++ require_overrides: bool = False,
+ ) -> str:
+ """
+ Parse each requirement from the requirements_lock file, and prepare arguments for each
+@@ -131,6 +133,13 @@ def generate_parsed_requirements_contents(
+ _packages = {repo_names_and_reqs}
+ _config = {args}
+ _annotations = {annotations}
++ _overrides = {overrides}
++ _require_overrides = {require_overrides}
++
++ _NOP_OVERRIDE = {{
++ "url": None,
++ "sha256": None,
++ }}
+
+ def _clean_name(name):
+ return name.replace("-", "_").replace(".", "_").lower()
+@@ -160,10 +169,20 @@ def generate_parsed_requirements_contents(
+
+ def install_deps():
+ for name, requirement in _packages:
++ override_name = requirement.split(" ")[0]
++ override = _overrides.get(override_name)
++ if not override:
++ if _require_overrides:
++ fail("Failed to find an override for \\"{{}}\\" in the \\"overrides\\" JSON file".format(override_name))
++ else:
++ override = _NOP_OVERRIDE
++
+ whl_library(
+ name = name,
+ requirement = requirement,
+ annotation = _get_annotation(requirement),
++ url = override["url"],
++ sha256 = override["sha256"],
+ **_config
+ )
+ """.format(
+@@ -178,6 +197,8 @@ def generate_parsed_requirements_contents(
+ repo_names_and_reqs=repo_names_and_reqs,
+ repo_prefix=repo_prefix,
+ wheel_file_label=bazel.WHEEL_FILE_LABEL,
++ overrides=overrides or {},
++ require_overrides=require_overrides,
+ )
+ )
+
+@@ -234,6 +255,16 @@ If set, it will take precedence over python_interpreter.",
+ type=annotation.annotations_map_from_str_path,
+ help="A json encoded file containing annotations for rendered packages.",
+ )
++ parser.add_argument(
++ "--overrides",
++ type=Path,
++ help="A json encoded file containing URL overrides for packages.",
++ )
++ parser.add_argument(
++ "--require-overrides",
++ action="store_true",
++ help="If set, requires that every requirement has a URL override in the --overrides JSON file.",
++ )
+ arguments.parse_common_args(parser)
+ args = parser.parse_args()
+
+@@ -259,6 +290,11 @@ If set, it will take precedence over python_interpreter.",
+ }
+ )
+
++ if args.overrides:
++ overrides = json.loads(args.overrides.read_text())
++ else:
++ overrides = None
++
+ output.write(
+ textwrap.dedent(
+ """\
+@@ -278,6 +314,8 @@ If set, it will take precedence over python_interpreter.",
+ repo_prefix=args.repo_prefix,
+ whl_library_args=whl_library_args,
+ annotations=annotated_requirements,
++ overrides=overrides,
++ require_overrides=args.require_overrides,
+ )
+ )
+
+diff --git a/python/pip_install/pip_repository.bzl b/python/pip_install/pip_repository.bzl
+index d729ae9..afe3102 100644
+--- a/python/pip_install/pip_repository.bzl
++++ b/python/pip_install/pip_repository.bzl
+@@ -257,6 +257,11 @@ def _pip_repository_impl(rctx):
+ args += ["--python_interpreter", _get_python_interpreter_attr(rctx)]
+ if rctx.attr.python_interpreter_target:
+ args += ["--python_interpreter_target", str(rctx.attr.python_interpreter_target)]
++ if rctx.attr.overrides:
++ overrides_file = rctx.path(rctx.attr.overrides).realpath
++ args += ["--overrides", overrides_file]
++ if rctx.attr.require_overrides:
++ args += ["--require-overrides"]
+ progress_message = "Parsing requirements to starlark"
+ else:
+ args = [
+@@ -391,6 +396,14 @@ pip_repository_attrs = {
+ default = False,
+ doc = "Create the repository in incremental mode.",
+ ),
++ "overrides": attr.label(
++ allow_single_file = True,
++ doc = "A JSON file containing overrides. TBD",
++ ),
++ "require_overrides": attr.bool(
++ default = False,
++ doc = "If True, every requirement must have an entry in the \"overrides\" JSON file.",
++ ),
+ "requirements": attr.label(
+ allow_single_file = True,
+ doc = "A 'requirements.txt' pip requirements file.",
+@@ -483,6 +496,16 @@ def _whl_library_impl(rctx):
+ "--annotation",
+ rctx.path(rctx.attr.annotation),
+ ])
++ if rctx.attr.url:
++ basename = rctx.attr.url.split("/")[-1]
++ download_result = rctx.download(
++ output = basename,
++ url = rctx.attr.url,
++ sha256 = rctx.attr.sha256 or None,
++ )
++ if not download_result.success:
++ fail("Failed to download {}".format(rctx.attr.url))
++ args.append("--pre-downloaded")
+
+ args = _parse_optional_attrs(rctx, args)
+
+@@ -515,6 +538,17 @@ whl_library_attrs = {
+ mandatory = True,
+ doc = "Python requirement string describing the package to make available",
+ ),
++ "url": attr.string(
++ doc = (
++ "Set this to download the package from the specified URL instead of using pip. "
++ ),
++ ),
++ "sha256": attr.string(
++ doc = (
++ "Optionally set this when using the 'url' parameter. " +
++ "Must be the expected checksum of the downloaded file."
++ ),
++ )
+ }
+
+ whl_library_attrs.update(**common_attrs)
diff --git a/tools/python/BUILD b/tools/python/BUILD
index 37101f5..1fa7a55 100644
--- a/tools/python/BUILD
+++ b/tools/python/BUILD
@@ -75,3 +75,12 @@
toolchain = ":upstream_py_runtime",
toolchain_type = "@rules_python//python:toolchain_type",
)
+
+py_binary(
+ name = "mirror_pip_packages",
+ srcs = ["mirror_pip_packages.py"],
+ deps = [
+ "@pip//pkginfo",
+ "@pip//requests",
+ ],
+)
diff --git a/tools/python/README.md b/tools/python/README.md
new file mode 100644
index 0000000..cf878f7
--- /dev/null
+++ b/tools/python/README.md
@@ -0,0 +1,56 @@
+FRC971's Python setup
+================================================================================
+
+How to depend on pip packages
+--------------------------------------------------------------------------------
+You can only depend on pip packages that are listed in our
+[requirements file][requirements_file]. Any package you see in there can be
+depended on.
+
+For example, depend on `numpy` like so:
+```python
+py_binary(
+ name = "bin",
+ srcs = ["bin.py"],
+ deps = [
+ "@pip//numpy",
+ ],
+)
+```
+
+The labels are "normalized". That means the entries in the [requirements
+file][requirements_file] may not be usable as-is. When you know the name of the
+package, apply the following transformations:
+
+1. Make the name lower-case.
+2. Replace all dots and dashes with underscores.
+
+The following are examples to show-case the various rules:
+
+* `Jinja2` becomes `@pip//jinja2`.
+* `absl-py` becomes `@pip//absl_py`.
+* `Flask-SQLAlchemy` becomes `@pip//flask_sqlalchemy`.
+* `ruamel.yaml` becomes `@pip//ruamel_yaml`.
+
+
+How to add new pip packages
+--------------------------------------------------------------------------------
+
+1. Add the new package you're interested in to `tools/python/requirements.txt`.
+2. Run the lock file generation script.
+
+ bazel run //tools/python:requirements.update
+
+
+How to make buildkite happy with new pip packages
+--------------------------------------------------------------------------------
+In order for buildkite to be able to use new pip packages, they have to be
+mirrored on frc971 infrastructure.
+
+1. Follow the above procedure for adding new pip packages if not already done.
+2. Run the mirroring script.
+
+ bazel run //tools/python:mirror_pip_packages --config=k8_upstream_python -- --ssh_host <software>
+
+ where `<software>` is the `ssh(1)` target for reaching the server that hosts
+ the FRC971 mirror.
diff --git a/tools/python/generate_pip_packages_in_docker.sh b/tools/python/generate_pip_packages_in_docker.sh
new file mode 100755
index 0000000..77cd068
--- /dev/null
+++ b/tools/python/generate_pip_packages_in_docker.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+# This script runs inside of a docker container to download the wheels in our
+# requirements lock file. If necessary, this script will also build wheels for
+# packages that are only available in source form. If a wheel is built from
+# source, it will be made more hermetic with the "auditwheel" tool. That tool
+# grafts system libraries into the wheel itself as per PEP600.
+#
+# This file is largely inspired by the manylinux demo:
+# https://github.com/pypa/python-manylinux-demo/blob/master/travis/build-wheels.sh
+#
+# This file is more complicated than the demo largely because of a bug in the
+# "auditwheel" tool. It can't deal with already-fixed packages. Once that's
+# fixed, I think we can simplify this script quite a bit.
+# https://github.com/pypa/auditwheel/issues/394
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+readonly PLAT="$1"
+readonly ARCH="$2"
+readonly PYTHON_VERSION="$3"
+readonly CALLER_ID="$4"
+
+readonly PYTHON_BIN="/opt/python/cp${PYTHON_VERSION}-cp${PYTHON_VERSION}/bin/python3"
+
+# Try to make the wheels reproducible by telling them we're in 1980.
+# Unfortunately, this is insufficient due to a pip bug.
+# https://github.com/pypa/pip/issues/9604
+export SOURCE_DATE_EPOCH=315532800
+
+SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"
+readonly SCRIPT_DIR
+
+clean_up() {
+ chown -R "${CALLER_ID}:${CALLER_ID}" "${SCRIPT_DIR}"
+}
+
+trap clean_up EXIT
+
+rm -rf \
+ "${SCRIPT_DIR}"/venv \
+ "${SCRIPT_DIR}"/wheelhouse_tmp \
+ "${SCRIPT_DIR}"/wheelhouse
+
+mkdir "${SCRIPT_DIR}"/venv
+pushd "${SCRIPT_DIR}"/venv
+
+"${PYTHON_BIN}" -m venv venv
+
+source venv/bin/activate
+
+readonly -a PIP_BIN=("${PYTHON_BIN}" -m pip)
+
+# Might be useful for debugging.
+"${PIP_BIN[@]}" --version
+
+mkdir "${SCRIPT_DIR}"/wheelhouse
+
+# Get wheels for everything. Everything is stored in a temporary wheelhouse in
+# case we need to run the "auditwheel" tool against them.
+"${PIP_BIN[@]}" wheel \
+ --no-deps \
+ -r "${SCRIPT_DIR}/requirements.lock.txt" \
+ -w "${SCRIPT_DIR}/wheelhouse_tmp/" \
+ | tee /tmp/pip-wheel.log
+
+# Find the list of packages that were built from source.
+# We need to suppress the exit code of grep here because it returns non-zero if
+# we have no source packages.
+source_packages=($( \
+ (grep -o '^\s\+Building wheel for [-_.a-zA-Z0-9]\+' /tmp/pip-wheel.log || :) \
+ | awk '{print $4}' \
+ | sort -u \
+ ))
+
+# Let the user know which packages we built ourselves.
+echo "The following packages were built from source based on pip's output."
+for package in "${source_packages[@]}"; do
+ echo " - ${package}"
+done
+if ((${#source_packages[@]} == 0)); then
+ echo " (no source packages)"
+fi
+
+# Find the list of actual wheel filenames we built.
+wheels_built_from_source=()
+for package in "${source_packages[@]}"; do
+ # Extract lines that look roughly like so:
+ # Created wheel for orderedset: filename=orderedset-2.0.3-cp39-cp39-linux_x86_64.whl size=382564 sha256=70fd9e3ab45cf737048b757ba219adf11a691963fbb88c9f16f6eef3866239a9
+ log_line="$(grep -o "Created wheel for ${package}: filename=[^ ]\\+" /tmp/pip-wheel.log)"
+ filename="$(cut -d= -f2 <<<"${log_line}")"
+    wheels_built_from_source+=("${filename}")
+done
+
+# Make the wheels we built more hermetic. The auditwheel tool will graft system
+# libraries into the wheel itself. The list of system libraries that will not
+# get grafted is here:
+# https://peps.python.org/pep-0599/#the-manylinux2014-policy
+for wheel in "${wheels_built_from_source[@]}"; do
+ wheel_path="${SCRIPT_DIR}/wheelhouse_tmp/${wheel}"
+ echo "Repairing wheel ${wheel}"
+ if ! auditwheel show "${wheel_path}"; then
+ echo "Assuming ${wheel} is a non-platform wheel. Skipping."
+ continue
+ fi
+ auditwheel repair \
+ --plat "${PLAT}_${ARCH}" \
+ --only-plat \
+ -w "${SCRIPT_DIR}"/wheelhouse/ \
+ "${wheel_path}"
+done
+
+# Copy the downloaded wheels into the final wheelhouse too.
+downloaded_wheels=($(grep '^Saved [^ ]\+\.whl$' /tmp/pip-wheel.log \
+ | awk '{print $2}'))
+cp "${downloaded_wheels[@]}" "${SCRIPT_DIR}"/wheelhouse/
diff --git a/tools/python/mirror_pip_packages.py b/tools/python/mirror_pip_packages.py
new file mode 100644
index 0000000..2b18406
--- /dev/null
+++ b/tools/python/mirror_pip_packages.py
@@ -0,0 +1,223 @@
+"""This script mirrors our pip package dependencies.
+
+This script looks at the requirements.lock.txt file and generates a wheel for
+each entry. Those wheels are then mirrored.
+
+See tools/python/README.md for some more information.
+"""
+
+import argparse
+import hashlib
+import json
+import os
+import pwd
+import subprocess
+import sys
+import tarfile
+from pathlib import Path
+from typing import List, Optional, Tuple
+
+import requests
+from pkginfo import Wheel
+
+PYTHON_VERSION = 39
+PLAT = "manylinux_2_28"
+ARCH = "x86_64"
+WHEELHOUSE_MIRROR_URL = "https://software.frc971.org/Build-Dependencies/wheelhouse"
+PY_DEPS_WWWW_DIR = "/var/www/html/files/frc971/Build-Dependencies/wheelhouse"
+
+
+def compute_sha256(data: bytes) -> str:
+ """Computes the sha256 checksum of a bytes sequence.
+
+ Args:
+ data: The bytes to checksum.
+
+ Returns:
+ The hex representation of the checksum.
+ """
+ hasher = hashlib.sha256()
+ hasher.update(data)
+ return hasher.hexdigest()
+
+
+def compute_file_sha256(filename: Path) -> str:
+ """Computes the sha256 checksum of the content of a file.
+
+ Args:
+ filename: The file to checksum.
+
+ Returns:
+ The hex representation of the checksum.
+ """
+ return compute_sha256(filename.read_bytes())
+
+
+def search_for_uploaded_wheel(wheel: Path, wheel_url: str) -> Tuple[bool, str]:
+ """Searches for this wheel on our internal mirror.
+
+ Since we can't build wheels reproducibly, our best option is to check
+ whether this wheel already exists on the mirror. If it does, we can skip
+ uploading it.
+
+ Args:
+ wheel: The wheel to search for on the mirror.
+ wheel_url: The URL where the wheel is expected if it exists on the mirror.
+
+ Returns:
+ A two-tuple. The first value is a boolean that signifies whether the
+ wheel was found on the mirror. The second value is a string. If the
+ wheel was not found on the mirror, this is an empty string. Otherwise,
+ this string contains the sha256 checksum of the wheel found on the
+ mirror.
+ """
+ # TODO(phil): A better way to do this would be to SSH into the host and
+ # look for files on the filesystem.
+ request = requests.get(wheel_url)
+
+ if request.status_code == 200:
+ return True, compute_sha256(request.content)
+ if request.status_code == 404:
+ return False, ""
+
+ raise RuntimeError(
+        f"Don't know what to do with status code {request.status_code} when trying to get {wheel_url}"
+ )
+
+
+def copy_to_host_and_unpack(filename: Path, ssh_host: str) -> None:
+ """Copies the tarball of wheels to the server and unpacks the tarball.
+
+ Args:
+ filename: The path to the tarball to be uploaded.
+ ssh_host: The server that will be passed to ssh(1) for uploading and
+ unpacking the tarball.
+ """
+ # TODO(phil): De-duplicate with tools/go/mirror_go_repos.py
+
+ subprocess.run(["scp", filename, f"{ssh_host}:"], check=True)
+
+ # Be careful not to use single quotes in these commands to avoid breaking
+ # the subprocess.run() invocation below.
+ command = " && ".join([
+ f"mkdir -p {PY_DEPS_WWWW_DIR}",
+ f"tar -C {PY_DEPS_WWWW_DIR} --no-same-owner -xvaf {filename.name}",
+ # Change the permissions so other users can read them (and checksum
+ # them).
+ f"find {PY_DEPS_WWWW_DIR}/ -type f -exec chmod 644 {{}} +",
+ ])
+
+ print("You might be asked for your sudo password shortly.")
+ subprocess.run(
+ ["ssh", "-t", ssh_host, f"sudo -u www-data bash -c '{command}'"],
+ check=True)
+
+
+def main(argv: List[str]) -> Optional[int]:
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "-f",
+ "--force",
+ action="store_true",
+ help=("If set, ignores packages we have already uploaded and "
+ "possibly overwrite them with the just-built ones. Use with "
+ "extreme caution! This may easily cause issues with building "
+ "older commits. Use this only if you know what you're doing."))
+ parser.add_argument(
+ "--ssh_host",
+ type=str,
+        help=("The SSH host to copy the built wheels to. This "
+ "should be software.971spartans.net where all the "
+ "Build-Dependencies files live. Only specify this if you have "
+ "access to the server."))
+ args = parser.parse_args(argv[1:])
+
+ root_dir = Path(os.environ["BUILD_WORKSPACE_DIRECTORY"])
+ caller = os.getenv("SUDO_USER") or os.environ["USER"]
+ caller_id = pwd.getpwnam(caller).pw_uid
+
+ python_dir = root_dir / "tools" / "python"
+
+ # Run the wheel generation script inside the docker container provided by
+ # the pypa/manylinux project.
+ # https://github.com/pypa/manylinux/
+ subprocess.run([
+ "docker",
+ "run",
+ "-it",
+ "-v",
+ f"{python_dir}:/opt/971_build/",
+ f"quay.io/pypa/{PLAT}_{ARCH}",
+ "/opt/971_build/generate_pip_packages_in_docker.sh",
+ PLAT,
+ ARCH,
+ str(PYTHON_VERSION),
+ str(caller_id),
+ ],
+ check=True)
+
+    # Get the list of wheels we downloaded from pypi.org or built ourselves.
+ wheelhouse = python_dir / "wheelhouse"
+ wheels = wheelhouse.glob("*.whl")
+
+ # Assemble the override list. This list will tell rules_python to download
+ # from our mirror instead of pypi.org.
+ wheels_to_be_uploaded = []
+ override_information = {}
+ for wheel in sorted(wheels):
+ wheel_url = f"{WHEELHOUSE_MIRROR_URL}/{wheel.name}"
+ sha256 = compute_file_sha256(wheel)
+
+ # Check if we already have the wheel uploaded. If so, download that one
+ # into the wheelhouse. This lets us avoid non-reproducibility with pip
+ # and native extensions.
+ # https://github.com/pypa/pip/issues/9604
+ wheel_found, sha256_on_mirror = search_for_uploaded_wheel(
+ wheel, wheel_url)
+
+ if args.force:
+ if wheel_found and sha256 != sha256_on_mirror:
+ print(
+                f"WARNING: The next upload will change the sha256 for {wheel}!"
+ )
+ wheels_to_be_uploaded.append(wheel)
+ else:
+ if wheel_found:
+ sha256 = sha256_on_mirror
+ else:
+ wheels_to_be_uploaded.append(wheel)
+
+ # Update the override information for this wheel.
+ # We use lower-case for the package names here because that's what the
+ # requirements.lock.txt file uses.
+ info = Wheel(wheel)
+ override_information[f"{info.name.lower()}=={info.version}"] = {
+ "url": wheel_url,
+ "sha256": sha256,
+ }
+
+ print(f"We need to upload {len(wheels_to_be_uploaded)} wheels:")
+ for wheel in wheels_to_be_uploaded:
+ print(wheel)
+
+ # Create a tarball of all the wheels that need to be mirrored.
+ py_deps_tar = root_dir / "py_deps.tar"
+ with tarfile.open(py_deps_tar, "w") as tar:
+ for wheel in wheels_to_be_uploaded:
+ tar.add(wheel, arcname=wheel.name)
+
+ # Upload the wheels if requested.
+ if wheels_to_be_uploaded and args.ssh_host:
+ copy_to_host_and_unpack(py_deps_tar, args.ssh_host)
+ else:
+ print("Skipping mirroring because of lack of --ssh_host or there's "
+ "nothing to actually mirror.")
+
+ # Write out the overrides file.
+ override_file = python_dir / "whl_overrides.json"
+ override_file.write_text(
+ json.dumps(override_information, indent=4, sort_keys=True) + "\n")
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
diff --git a/tools/python/requirements.lock.txt b/tools/python/requirements.lock.txt
index 337eaf8..ba7e2d7 100644
--- a/tools/python/requirements.lock.txt
+++ b/tools/python/requirements.lock.txt
@@ -4,6 +4,14 @@
#
# bazel run //tools/python:requirements.update
#
+certifi==2022.9.14 \
+ --hash=sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5 \
+ --hash=sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516
+ # via requests
+charset-normalizer==2.1.1 \
+ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
+ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
+ # via requests
cycler==0.11.0 \
--hash=sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3 \
--hash=sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f
@@ -12,6 +20,10 @@
--hash=sha256:545c05d0f7903a863c2020e07b8f0a57517f2c40d940bded77076397872d14ca \
--hash=sha256:edf251d5d2cc0580d5f72de4621c338d8c66c5f61abb50cf486640f73c8194d5
# via matplotlib
+idna==3.4 \
+ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
+ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
+ # via requests
kiwisolver==1.3.2 \
--hash=sha256:0007840186bacfaa0aba4466d5890334ea5938e0bb7e28078a0eb0e63b5b59d5 \
--hash=sha256:19554bd8d54cf41139f376753af1a644b63c9ca93f8f72009d50a2080f870f77 \
@@ -177,6 +189,10 @@
--hash=sha256:e3dacecfbeec9a33e932f00c6cd7996e62f53ad46fbe677577394aaa90ee419a \
--hash=sha256:eb9fc393f3c61f9054e1ed26e6fe912c7321af2f41ff49d3f83d05bacf22cc78
# via matplotlib
+pkginfo==1.8.3 \
+ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \
+ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c
+ # via -r tools/python/requirements.txt
pyparsing==3.0.6 \
--hash=sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4 \
--hash=sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81
@@ -187,6 +203,10 @@
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
# via matplotlib
+requests==2.28.1 \
+ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \
+ --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349
+ # via -r tools/python/requirements.txt
scipy==1.7.3 \
--hash=sha256:033ce76ed4e9f62923e1f8124f7e2b0800db533828c853b402c7eec6e9465d80 \
--hash=sha256:173308efba2270dcd61cd45a30dfded6ec0085b4b6eb33b5eb11ab443005e088 \
@@ -222,3 +242,7 @@
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
# via python-dateutil
+urllib3==1.26.12 \
+ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
+ --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
+ # via requests
diff --git a/tools/python/requirements.txt b/tools/python/requirements.txt
index 7bb0cc1..5d0f583 100644
--- a/tools/python/requirements.txt
+++ b/tools/python/requirements.txt
@@ -1,5 +1,8 @@
# After updating this file, run:
# $ bazel run //tools/python:requirements.update
+
matplotlib
numpy
+pkginfo
+requests
scipy
diff --git a/tools/python/whl_overrides.json b/tools/python/whl_overrides.json
new file mode 100644
index 0000000..9499780
--- /dev/null
+++ b/tools/python/whl_overrides.json
@@ -0,0 +1,70 @@
+{
+ "certifi==2022.9.14": {
+ "sha256": "e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/certifi-2022.9.14-py3-none-any.whl"
+ },
+ "charset-normalizer==2.1.1": {
+ "sha256": "83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/charset_normalizer-2.1.1-py3-none-any.whl"
+ },
+ "cycler==0.11.0": {
+ "sha256": "3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/cycler-0.11.0-py3-none-any.whl"
+ },
+ "fonttools==4.28.5": {
+ "sha256": "edf251d5d2cc0580d5f72de4621c338d8c66c5f61abb50cf486640f73c8194d5",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/fonttools-4.28.5-py3-none-any.whl"
+ },
+ "idna==3.4": {
+ "sha256": "90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/idna-3.4-py3-none-any.whl"
+ },
+ "kiwisolver==1.3.2": {
+ "sha256": "30fa008c172355c7768159983a7270cb23838c4d7db73d6c0f6b60dde0d432c6",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/kiwisolver-1.3.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl"
+ },
+ "matplotlib==3.5.1": {
+ "sha256": "87900c67c0f1728e6db17c6809ec05c025c6624dcf96a8020326ea15378fe8e7",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/matplotlib-3.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl"
+ },
+ "numpy==1.21.5": {
+ "sha256": "c293d3c0321996cd8ffe84215ffe5d269fd9d1d12c6f4ffe2b597a7c30d3e593",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/numpy-1.21.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl"
+ },
+ "packaging==21.3": {
+ "sha256": "ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/packaging-21.3-py3-none-any.whl"
+ },
+ "pillow==8.4.0": {
+ "sha256": "b8831cb7332eda5dc89b21a7bce7ef6ad305548820595033a4b03cf3091235ed",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/Pillow-8.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ },
+ "pkginfo==1.8.3": {
+ "sha256": "848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/pkginfo-1.8.3-py2.py3-none-any.whl"
+ },
+ "pyparsing==3.0.6": {
+ "sha256": "04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/pyparsing-3.0.6-py3-none-any.whl"
+ },
+ "python-dateutil==2.8.2": {
+ "sha256": "961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/python_dateutil-2.8.2-py2.py3-none-any.whl"
+ },
+ "requests==2.28.1": {
+ "sha256": "8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/requests-2.28.1-py3-none-any.whl"
+ },
+ "scipy==1.7.3": {
+ "sha256": "5d1cc2c19afe3b5a546ede7e6a44ce1ff52e443d12b231823268019f608b9b12",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/scipy-1.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ },
+ "six==1.16.0": {
+ "sha256": "8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/six-1.16.0-py2.py3-none-any.whl"
+ },
+ "urllib3==1.26.12": {
+ "sha256": "b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997",
+ "url": "https://software.frc971.org/Build-Dependencies/wheelhouse/urllib3-1.26.12-py2.py3-none-any.whl"
+ }
+}