Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1 | #!/usr/bin/python3 |
| 2 | |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 3 | from __future__ import annotations |
| 4 | import apt_pkg |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 5 | import collections |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 6 | import contextlib |
Austin Schuh | 86d980e | 2023-10-20 22:44:47 -0700 | [diff] [blame] | 7 | import datetime |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 8 | import functools |
| 9 | import jinja2 |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 10 | import os |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 11 | import pathlib |
| 12 | import re |
| 13 | import shlex |
Austin Schuh | 86d980e | 2023-10-20 22:44:47 -0700 | [diff] [blame] | 14 | import shutil |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 15 | import subprocess |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 16 | |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 17 | # Name of debian image to be created/modified |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 18 | IMAGE = "arm64_bookworm_debian_yocto.img" |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 19 | |
| 20 | # Path to yocto build for the orin (using meta-frc971) |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 21 | YOCTO = "/home/austin/local/jetpack/robot-yocto/build" |
| 22 | |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 23 | REQUIRED_DEPS = ["debootstrap", "u-boot-tools", "xfsprogs"] |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 24 | |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 25 | apt_pkg.init_system() |
| 26 | |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 27 | |
@contextlib.contextmanager
def scoped_loopback(image):
    """Mounts an image as a loop back device."""
    losetup = subprocess.run(["sudo", "losetup", "--show", "-f", image],
                             check=True,
                             stdout=subprocess.PIPE)
    loop_device = losetup.stdout.decode('utf-8').strip()
    print("Mounted", image, "to", repr(loop_device))
    try:
        yield loop_device
    finally:
        # Always detach the loop device, even if the caller raised.
        subprocess.run(["sudo", "losetup", "-d", loop_device], check=True)
| 40 | |
| 41 | |
@contextlib.contextmanager
def scoped_mount(image):
    """Mounts an image as a partition."""
    partition = f"{image}.partition"
    with contextlib.suppress(FileExistsError):
        os.mkdir(partition)

    subprocess.run(["sudo", "mount", "-o", "loop", image, partition],
                   check=True)

    try:
        yield partition
    finally:
        # Best-effort removal of the qemu binary copied in for chrooting
        # (deliberately no check=True: it may not have been copied in).
        subprocess.run(
            ["sudo", "rm", f"{partition}/usr/bin/qemu-aarch64-static"])
        subprocess.run(["sudo", "umount", partition], check=True)
| 60 | |
| 61 | |
def check_required_deps(deps):
    """Checks if the provided list of dependencies is installed.

    Prints install instructions and exits with a nonzero status if any
    dependency is missing.

    Args:
        deps: List of debian package names to check for.
    """
    missing_deps = []
    for dep in deps:
        # dpkg-query exits nonzero for packages it does not know about, so
        # check=True must not be used here: a missing package is exactly
        # what we are probing for, not a subprocess failure.
        result = subprocess.run(["dpkg-query", "-W", "-f='${Status}'", dep],
                                stdout=subprocess.PIPE)

        if "install ok installed" not in result.stdout.decode('utf-8'):
            missing_deps.append(dep)

    if missing_deps:
        print("Missing dependencies, please install:")
        print("sudo apt-get install", " ".join(missing_deps))
        # Exit nonzero so wrapping scripts can detect the failure; the
        # original bare exit() reported success.
        exit(1)
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 77 | |
| 78 | |
def make_image(image, size=8589934592):
    """Makes a sparse image file and creates an xfs filesystem on it.

    Args:
        image: Path of the image file to create.
        size: Size of the image in bytes.  Defaults to 8 GiB, matching the
            original hard-coded value.
    """
    print("Creating image ", f"{image}")
    # count=0 with a seek creates a sparse file of the requested size
    # without writing any data.
    subprocess.run(
        ["dd", "if=/dev/zero", f"of={image}", "bs=1", "count=0",
         f"seek={size}"],
        check=True)

    with scoped_loopback(image) as loopback:
        subprocess.run([
            "sudo", "mkfs.xfs", "-d", "su=128k", "-d", "sw=1", "-L", "rootfs",
            loopback
        ],
                       check=True)
| 94 | |
| 95 | |
def target_unescaped(cmd):
    """Runs a command as root with bash -c cmd, ie without escaping."""
    chroot_invocation = [
        "sudo", "chroot", "--userspec=0:0", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c", cmd
    ]
    subprocess.run(chroot_invocation, check=True)
| 103 | |
| 104 | |
def target(cmd):
    """Runs a command as root with escaping.

    Args:
        cmd: List of argv strings to run inside the chroot.
    """
    # shlex.join() already quotes each argument.  The original pre-quoted
    # each element with shlex.quote() and then joined, which double-escaped
    # the arguments and corrupted any argument containing spaces or shell
    # metacharacters.
    target_unescaped(shlex.join(cmd))
| 108 | |
| 109 | |
def pi_target_unescaped(cmd):
    """Runs a command as pi with bash -c cmd, ie without escaping."""
    chroot_invocation = [
        "sudo", "chroot", "--userspec=pi:pi", "--groups=pi", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c", cmd
    ]
    subprocess.run(chroot_invocation, check=True)
| 117 | |
| 118 | |
def pi_target(cmd):
    """Runs a command as pi with escaping.

    Args:
        cmd: List of argv strings to run inside the chroot.
    """
    # shlex.join() already quotes each argument; pre-quoting with
    # shlex.quote() double-escaped arguments containing spaces or shell
    # metacharacters (same fix as target()).
    pi_target_unescaped(shlex.join(cmd))
| 122 | |
| 123 | |
def copyfile(owner, permissions, file):
    """Copies a file from contents/{file} with the provided owner and permissions."""
    print("copyfile", owner, permissions, file)
    destination = f"{PARTITION}/{file}"
    subprocess.run(["sudo", "cp", f"contents/{file}", destination],
                   check=True)
    subprocess.run(["sudo", "chmod", permissions, destination], check=True)
    # chown inside the chroot so names resolve against the target's passwd.
    target(["chown", owner, f"/{file}"])
| 132 | |
| 133 | |
def target_mkdir(owner_group, permissions, folder):
    """Creates a directory recursively with the provided permissions and ownership."""
    print("target_mkdir", owner_group, permissions, folder)
    owner, group = owner_group.split(':')
    target([
        "install", "-d", "-m", permissions, "-o", owner, "-g", group, folder
    ])
| 140 | |
| 141 | |
def list_packages():
    """Lists all installed packages in the target filesystem.

    Returns:
        A dictionary with keys as packages, and values as versions.
    """
    result = subprocess.run([
        "sudo", "chroot", "--userspec=0:0", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c",
        "dpkg-query -W -f='${Package} ${Version}\n'"
    ],
                            check=True,
                            stdout=subprocess.PIPE)

    # dpkg-query emits one "package version" pair per line.  (The original
    # decoded stdout twice and left the first decode in a dead local.)
    packages = {}
    for line in result.stdout.decode('utf-8').strip().split('\n'):
        package, version = line.split(' ')
        packages[package] = version

    return packages
| 164 | |
| 165 | |
def list_yocto_packages():
    """Lists all packages in the Yocto folder.

    Returns:
        list of Package classes.
    """
    Package = collections.namedtuple(
        'Package', ['path', 'name', 'version', 'architecture'])
    packages = []
    for deb_path in pathlib.Path(f"{YOCTO}/tmp/deploy/deb").glob('**/*.deb'):
        # Filenames look like name_version_arch.deb: drop the directory and
        # the ".deb" suffix, then split on _ to recover the fields.
        fields = os.path.basename(str(deb_path))[:-4].split('_')
        packages.append(
            Package(str(deb_path), fields[0], fields[1], fields[2]))

    return packages
| 182 | |
| 183 | |
def install_packages(new_packages, existing_packages):
    """Installs the provided yocto packages, if they are new."""
    # Stage the debs in /tmp inside the image, install them, then clean the
    # staging folder up.
    target(["mkdir", "-p", "/tmp/yocto_packages"])
    try:
        to_install = []
        for package in new_packages:
            # Skip anything already present at the same version.
            if existing_packages.get(package.name) == package.version:
                print('Skipping', package)
                continue

            deb_name = os.path.basename(package.path)
            subprocess.run([
                "sudo", "cp", package.path,
                f"{PARTITION}/tmp/yocto_packages/{deb_name}"
            ],
                           check=True)
            to_install.append(package)

        if to_install:
            target(["dpkg", "-i"] + [
                f"/tmp/yocto_packages/{os.path.basename(p.path)}"
                for p in to_install
            ])

    finally:
        target(["rm", "-rf", "/tmp/yocto_packages"])
| 211 | |
| 212 | |
def install_virtual_packages(virtual_packages):
    """Builds and installs the provided virtual packages.

    Args:
        virtual_packages: List of folder names under virtual_packages/ to
            build with dpkg-deb and install into the image.
    """
    try:
        target(["mkdir", "-p", "/tmp/yocto_packages"])
        for virtual_package in virtual_packages:
            subprocess.run(
                ["dpkg-deb", "--build", f"virtual_packages/{virtual_package}"],
                check=True)
            subprocess.run([
                "sudo", "cp", f"virtual_packages/{virtual_package}.deb",
                f"{PARTITION}/tmp/yocto_packages/{virtual_package}.deb"
            ],
                           check=True)

        target(["dpkg", "-i"] + [
            f"/tmp/yocto_packages/{package}.deb"
            for package in virtual_packages
        ])

        # Remove the locally built .deb files.  os.remove is sufficient:
        # they were created by us, so there is no need to shell out to rm.
        for virtual_package in virtual_packages:
            os.remove(f"virtual_packages/{virtual_package}.deb")

    finally:
        # Always clean the staging folder out of the image.
        target(["rm", "-rf", "/tmp/yocto_packages"])
| 238 | |
| 239 | |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 240 | class NameVersion: |
| 241 | """ Class representing a package name and optionally a version constraint. """ |
| 242 | |
| 243 | def __init__(self, nameversion: str): |
| 244 | # We are processing package names here like: |
| 245 | # python3:any |
| 246 | # python3-markdown (= 3.4.1-2) |
| 247 | if '(' in nameversion: |
| 248 | s = nameversion.split(' (') |
| 249 | self.name = s[0].strip() |
| 250 | |
| 251 | v = s[1][:-1].split(' ') |
| 252 | |
| 253 | self.operator = v[0] |
| 254 | self.version = v[1] |
| 255 | else: |
| 256 | self.name = nameversion.strip() |
| 257 | self.operator = None |
| 258 | self.version = None |
| 259 | |
| 260 | # Rip off :amd64 or :aarch64 from the name if it is here. |
| 261 | if ':' in self.name: |
| 262 | self.name = self.name.split(':')[0] |
| 263 | |
| 264 | def matches(self, other: NameVersion) -> bool: |
| 265 | """If self meets the requirements defined by other.""" |
| 266 | if other.name != self.name: |
| 267 | return False |
| 268 | |
| 269 | if other.operator is None: |
| 270 | return True |
| 271 | |
| 272 | vc = apt_pkg.version_compare(self.version, other.version) |
| 273 | if vc < 0: |
| 274 | return other.operator in ('<=', '<<') |
| 275 | elif vc == 0: |
| 276 | return other.operator in ('=', '>=', '<=') |
| 277 | elif vc > 0: |
| 278 | return other.operator in ('>=', '>>') |
| 279 | |
| 280 | def __repr__(self) -> str: |
| 281 | if self.operator is not None: |
| 282 | return f"NameVersion({self.name} ({self.operator} {self.version}))" |
| 283 | else: |
| 284 | return f"NameVersion({self.name})" |
| 285 | |
| 286 | |
class Package:
    """A dpkg package: its name, what it provides/depends on, and its files.

    Instances are constructed from dpkg-query output fields (see Filesystem).
    """

    def __init__(self, name: str, provides: str, version: str, depends: str,
                 files: list[str]):
        # The package's own name always carries an exact version constraint.
        self.name = NameVersion(f"{name} (= {version})")

        # A package always provides itself, plus everything in the comma
        # separated Provides: field.
        self.provides = [self.name]

        if provides:
            for package_and_version in provides.split(","):
                self.provides.append(NameVersion(package_and_version))

        # depends is a list of lists: each inner list holds the alternatives
        # of an "a | b" dependency, of which at least one must be satisfied.
        # Most entries are single-element lists.
        self.depends = []
        if depends:
            for package_and_version in depends.split(", "):
                if ' | ' in package_and_version:
                    oneof = []
                    for oneof_package_and_version in package_and_version.split(
                            ' | '):
                        oneof.append(NameVersion(oneof_package_and_version))
                    self.depends.append(oneof)
                else:
                    self.depends.append([NameVersion(package_and_version)])

        # Absolute paths (inside the rootfs) owned by this package.
        self.files = files

    def update_filetypes(self, directories: set[str], symlinks: dict[str,
                                                                     str]):
        """Splits self.files into plain files, directories, and symlinks.

        Idempotent: does nothing if it has already run on this package.
        """
        if hasattr(self, 'directories') or hasattr(self, 'symlinks'):
            return

        self.directories = []
        self.symlinks = dict()
        files = []
        for f in self.files:
            if f in directories:
                self.directories.append(f)
            elif f in symlinks:
                self.symlinks[f] = symlinks[f]
            else:
                files.append(f)

        self.files = files

    def matches(self, other: NameVersion) -> bool:
        """If self meets the requirements defined by other."""
        return any(p.matches(other) for p in self.provides)

    def resolved_depends(self, packages: dict[str, Package]) -> list[Package]:
        """Resolves this package's dependencies against `packages`.

        Args:
            packages: Mapping from package name to the Package providing it.

        Returns:
            The resolved dependency Packages, sorted by name.

        Raises:
            RuntimeError: If no alternative of some dependency resolves.
        """
        result = set()

        # The dependencies are lists of lists of dependencies.  At least one
        # element from each inner list needs to match for it to be valid.  Most
        # of the dependencies are going to be a single element list.
        for p_or_list in self.depends:
            resolved_set = set()
            for oneof_package in p_or_list:
                if oneof_package.name not in packages:
                    continue

                resolved_oneof_package = packages[oneof_package.name]
                if resolved_oneof_package.matches(oneof_package):
                    resolved_set.add(resolved_oneof_package)

            if len(resolved_set) == 0:
                raise RuntimeError(
                    f"Failed to find dependencies for {p_or_list}: {repr(self)}"
                )

            result.update(resolved_set)

        return sorted(list(result), key=lambda x: x.name.name)

    def headers(self) -> list[str]:
        """Returns the files this package owns under /usr/include."""
        return [h for h in self.files if h.startswith('/usr/include')]

    def objects(self) -> list[str]:
        """Returns the shared objects this package owns under /usr.

        A file is kept when, after peeling extensions off the right (so
        version suffixes like .1.2.3 are stripped), the extension nearest
        the stem is .so -- which matches libfoo.so and libfoo.so.1.2.3.
        """
        result = []
        for file in self.files:
            if not file.startswith('/usr'):
                continue

            # Gotta love GDB extensions ...libc.so....py.  Ignore them.
            if file.endswith('.py'):
                continue

            # We want to find things like libfoo.so.1.2.3.4.5.  Peel
            # extensions off the right with splitext; after the loop,
            # found_so reflects the extension closest to the stem.
            opath = file
            found_so = False
            while True:
                opath, ext = os.path.splitext(opath)
                if ext == '':
                    break
                elif ext == '.so':
                    found_so = True
                else:
                    found_so = False

            if found_so:
                result.append(file)

        return sorted(result)

    def __repr__(self) -> str:
        return f"{{ {repr(self.provides[0])}, \"provides\": {repr(self.provides[1:])}, \"depends\": {repr(self.depends)} }}"
| 392 | |
| 393 | |
class PkgConfig:
    """Parsed representation of a pkg-config (.pc) file.

    Attributes:
        variables: Mapping from variable name to its expanded value.
        package: The Package which owns this .pc file.
        name: Parsed Name: field (None until parsed).
        description: Parsed Description: field (None if absent).
        version: Parsed Version: field (None if absent).
        libs: Linker flags from the Libs: line.
        cflags: Compiler flags from the Cflags: line.

    Raises:
        RuntimeError: If the contents have no Name: field.
        ValueError: If an unrecognized line is encountered.
    """

    def __init__(self, contents, package):
        # The pkgconfig file format lets you specify variables and then
        # expand them into the various fields.  These are in the form
        # asdf=15234
        self.variables = dict()

        self.package = package
        # Initialize the parsed fields up front so a .pc file missing one of
        # them leaves the attribute as None.  The original never initialized
        # self.name, so a file without a Name: field crashed with
        # AttributeError at the check below instead of the intended
        # RuntimeError.
        self.name = None
        self.description = None
        self.version = None
        self.libs = []
        self.cflags = []
        for line in contents.split('\n'):
            line = line.strip()
            # Parse everything so we learn if a new field shows up we don't
            # know how to parse.
            if line == '':
                pass
            elif line[0] == '#':
                pass
            elif line.startswith('Name:'):
                self.name = self.expand(line.removeprefix('Name:').strip())
            elif line.startswith('Description:'):
                self.description = self.expand(
                    line.removeprefix('Description:').strip())
            elif line.startswith('Version:'):
                self.version = self.expand(
                    line.removeprefix('Version:').strip())
            elif line.startswith('Libs:'):
                self.libs = self.expand(
                    line.removeprefix('Libs:').strip()).split()
            elif line.startswith('Cflags:'):
                self.cflags = self.expand(
                    line.removeprefix('Cflags:').strip()).split()
            elif line.startswith('URL:'):
                pass
            elif line.startswith('Cflags.private:'):
                pass
            elif line.startswith('Requires:'):
                pass
            elif line.startswith('Requires.private:'):
                pass
            elif line.startswith('Libs.private:'):
                pass
            elif line.startswith('Conflicts:'):
                pass
            elif re.match('^[-a-zA-Z_0-9]* *=.*$', line):
                # Variable definition, e.g. prefix=/usr.
                split_line = re.split(' *= *', line)
                self.variables[split_line[0]] = self.expand(split_line[1])
            else:
                raise ValueError('Unknown line in pkgconfig file')

        if self.name is None:
            raise RuntimeError("Failed to find Name.")

    def expand(self, line: str) -> str:
        """ Expands a string with variable expansions in it like bash (${foo}). """
        for var in self.variables:
            line = line.replace('${' + var + '}', self.variables[var])
        return line
| 453 | |
| 454 | |
class Filesystem:
    """An index of everything installed in a rootfs partition.

    Builds maps of packages, files, directories, symlinks, the ldconfig
    cache, and pkg-config entries by inspecting the partition with
    dpkg-query (via chroot), os.walk, and ldconfig.
    """

    def __init__(self, partition):
        self.partition = partition
        # TODO(austin): I really want to be able to run this on an amd64
        # filesystem too, which won't work with qemu-aarch64-static.  Pull it
        # into a library.
        #
        # Dump every installed package's version, name, provides, depends,
        # and file list, with "--" separating package records.
        result = subprocess.run([
            "sudo", "chroot", "--userspec=0:0", f"{self.partition}",
            "qemu-aarch64-static", "/bin/bash", "-c",
            "dpkg-query -W -f='Version: ${Version}\nPackage: ${Package}\nProvides: ${Provides}\nDepends: ${Depends}\n${db-fsys:Files}--\n'"
        ],
                                check=True,
                                stdout=subprocess.PIPE)

        # Mapping from all package names (str) to their corresponding Package
        # objects for that package.
        self.packages = dict()

        # Accumulates the fields of the record currently being parsed.
        package_in_progress = {'files': []}
        # Every file path seen across all packages.
        files = set()
        for line in result.stdout.decode('utf-8').strip().split('\n'):
            if line == '--':
                # We found the end of line deliminator, save the package and
                # clear everything out.
                new_package = Package(package_in_progress['Package'],
                                      package_in_progress['Provides'],
                                      package_in_progress['Version'],
                                      package_in_progress['Depends'],
                                      package_in_progress['files'])

                # Register the package under its own name and every name it
                # Provides:.
                for provides in new_package.provides:
                    self.packages[provides.name] = new_package

                # Wipe everything so we detect if any fields are missing.
                package_in_progress = {'files': []}
            elif line.startswith("Version: "):
                package_in_progress['Version'] = line.removeprefix("Version: ")
            elif line.startswith("Package: "):
                package_in_progress['Package'] = line.removeprefix("Package: ")
            elif line.startswith("Provides: "):
                package_in_progress['Provides'] = line.removeprefix(
                    "Provides: ")
            elif line.startswith("Depends: "):
                package_in_progress['Depends'] = line.removeprefix("Depends: ")
            else:
                # Anything else must be an indented file path from
                # ${db-fsys:Files}.
                assert (line.startswith(' '))
                f = line.removeprefix(' ')
                package_in_progress['files'].append(f)
                files.add(f)

        self.directories = set()
        self.symlinks = dict()

        # Record every symlink in the partition, keyed by its path relative
        # to the partition root.
        for root, walked_dirs, walked_files in os.walk(self.partition):
            for entry in walked_files + walked_dirs:
                full_target = os.path.join(root, entry)
                if pathlib.Path(full_target).is_symlink():
                    target = full_target.removeprefix(self.partition)
                    self.symlinks[target] = os.readlink(full_target)

        # Classify every dpkg-owned path as symlink and/or directory.
        for file in files:
            full_target = f"{self.partition}/{file}"
            try:
                if pathlib.Path(full_target).is_symlink():
                    self.symlinks[file] = os.readlink(full_target)

                if pathlib.Path(full_target).is_dir():
                    self.directories.add(file)
            except PermissionError:
                # Assume it is a file...
                print("Failed to read", file)
                pass

            # Directories are all the things before the last /
            # NOTE(review): Path.parents yields PosixPath objects, so
            # self.directories holds a mix of str and PosixPath entries;
            # `f in directories` tests with str keys will not match the
            # PosixPath entries.  Confirm whether this is intended.
            for parent in pathlib.Path(file).parents:
                self.directories.add(parent)

        # Now, populate self.files with a mapping from each file to the owning
        # package so we can do file ownership lookups.
        visited = set()
        self.files = dict()
        for package in self.packages.values():
            if package in visited:
                continue
            visited.add(package)

            for f in package.files:
                if f in self.directories:
                    continue

                if f in self.files:
                    print("Duplicate file", repr(f), ' current', package,
                          ' already', self.files[f])
                    # Duplicate ownership is tolerated only under /usr/share.
                    if not f.startswith('/usr/share'):
                        assert (f not in self.files)
                self.files[f] = package

        # For each package, update the file list to track dependencies and symlinks correctly.
        for p in self.packages.values():
            p.update_filetypes(self.directories, self.symlinks)

        # Print out all the libraries and where they live as known to ldconfig
        result = subprocess.run(
            ['ldconfig', '-C', f'{self.partition}/etc/ld.so.cache', '-p'],
            check=True,
            stdout=subprocess.PIPE,
        )

        # Mapping from library name -> path, parsed from indented ldconfig -p
        # lines of the form "\tlibfoo.so.1 (libc6,AArch64) => /lib/...".
        self.ldconfig_cache = dict()
        for line in result.stdout.decode('utf-8').split('\n'):
            if line.startswith('\t'):
                split_line = re.split(' \\(libc6,(AArch64|x86-64)\\) => ',
                                      line.strip())
                self.ldconfig_cache[split_line[0]] = split_line[2]

        # Parse every pkg-config file found in the standard search locations,
        # keyed by the .pc filename without its suffix.
        self.pkgcfg = dict()
        for pkgconfig in [
                '/usr/local/lib/aarch64-linux-gnu/pkgconfig',
                '/usr/local/lib/pkgconfig',
                '/usr/local/share/pkgconfig',
                '/usr/lib/aarch64-linux-gnu/pkgconfig',
                '/usr/lib/pkgconfig',
                '/usr/share/pkgconfig',
        ]:
            candidate_folder = f"{self.partition}/{pkgconfig}"
            if not os.path.exists(candidate_folder):
                continue

            for f in os.listdir(candidate_folder):
                full_filename = f"{candidate_folder}/{f}"
                if pathlib.Path(full_filename).is_dir():
                    continue
                if not f.endswith('.pc'):
                    continue

                package_name = f.removesuffix('.pc')

                with open(f"{candidate_folder}/{f}", "r") as file:
                    self.pkgcfg[package_name] = PkgConfig(
                        file.read(), self.files[f'{pkgconfig}/{f}'])

    def resolve_symlink(self, path: str) -> str:
        """ Implements symlink resolution using self.symlinks. """
        # Only need to support absolute links since we don't have a concept of cwd.

        # Implements the symlink algorithm in
        # https://android.googlesource.com/platform/bionic.git/+/android-4.0.1_r1/libc/bionic/realpath.c
        assert (path[0] == '/')

        left = path.split('/')[1:]

        # NOTE(review): unreachable for an empty path -- the assert above
        # would raise IndexError first.
        if len(path) == 0:
            return path

        resolved = ['']

        # Consume path components from `left`, accumulating the resolved
        # prefix in `resolved` and re-expanding any symlink encountered.
        while len(left) > 0:
            if left[0] == '.':
                left = left[1:]
            elif left[0] == '..':
                assert (len(resolved) >= 1)
                resolved = resolved[:-1]
                left = left[1:]
            else:
                resolved.append(left[0])
                merged = '/'.join(resolved)
                if merged in self.symlinks:
                    symlink = self.symlinks[merged]
                    # Absolute symlink, blow away the previously accumulated path
                    if symlink[0] == '/':
                        resolved = ['']
                        left = symlink[1:].split('/') + left[1:]
                    else:
                        # Relative symlink, replace the symlink name in the path with the newly found target.
                        resolved = resolved[:-1]
                        left = symlink.split('/') + left[1:]
                else:
                    left = left[1:]

        return '/'.join(resolved)

    def exists(self, path: str) -> bool:
        """Returns True if path is a known file, symlink, or directory."""
        if path in self.files or path in self.symlinks or path in self.directories:
            return True
        return False

    def resolve_object(self,
                      obj: str,
                      requesting_obj: str | None = None) -> str:
        """Resolves a library name to its path inside the rootfs.

        Checks the ldconfig cache first, then falls back to looking next to
        the requesting object.

        Raises:
            FileNotFoundError: If the object cannot be located.
        """
        if obj in self.ldconfig_cache:
            return self.resolve_symlink(self.ldconfig_cache[obj])
        elif requesting_obj is not None:
            to_search = os.path.join(os.path.split(requesting_obj)[0], obj)
            if self.exists(to_search):
                return self.resolve_symlink(to_search)

        raise FileNotFoundError(obj)

    @functools.cache
    def object_dependencies(self, obj: str) -> list[str]:
        """Returns the NEEDED (DT_NEEDED) entries of obj per `objdump -p`.

        NOTE(review): functools.cache on an instance method keeps self alive
        for the cache's lifetime; acceptable for this script's single
        Filesystem instance.
        """
        result = subprocess.run(
            ['objdump', '-p', f'{self.partition}/{obj}'],
            check=True,
            stdout=subprocess.PIPE,
        )

        # Part of the example output.  We only want NEEDED from the dynamic section.
        #
        #  RELRO off    0x0000000000128af0 vaddr 0x0000000000128af0 paddr 0x0000000000128af0 align 2**0
        #          filesz 0x0000000000003510 memsz 0x0000000000003510 flags r--
        #
        # Dynamic Section:
        #   NEEDED               libtinfo.so.6
        #   NEEDED               libc.so.6
        #   INIT                 0x000000000002f000
        #   FINI                 0x00000000000efb94

        deps = []
        for line in result.stdout.decode('utf-8').split('\n'):
            if 'NEEDED' in line:
                deps.append(line.strip().split()[1])

        return deps
| 679 | |
| 680 | |
def generate_build_file(partition):
    """Generates a bazel BUILD file describing the partition's dev packages.

    Walks the package dependency graph starting from a hard-coded list of
    development packages, emitting a filegroup and a cc_library per package,
    a cc_library per shared object, and a cc_library per pkg-config entry.
    The result is rendered through orin_debian_rootfs.BUILD.template and
    written to ../../compilers/orin_debian_rootfs.BUILD.

    Args:
        partition: Path to the mounted rootfs partition to index.
    """
    filesystem = Filesystem(partition)

    packages_to_eval = [
        filesystem.packages['libopencv-dev'],
        filesystem.packages['libc6-dev'],
        filesystem.packages['libstdc++-12-dev'],
        filesystem.packages['libnpp-11-8-dev'],
    ]

    # Recursively walk the tree using dijkstra's algorithm to generate targets
    # for each set of headers.
    print('Walking tree for', [p.name.name for p in packages_to_eval])

    rules = []
    objs_to_eval = []

    # Set of packages already generated in case our graph hits a package
    # multiple times.
    packages_visited_set = set()
    while packages_to_eval:
        next_package = packages_to_eval.pop()
        if next_package in packages_visited_set:
            continue
        packages_visited_set.add(next_package)

        hdrs = next_package.headers()
        objects = next_package.objects()

        deps = []
        for p in next_package.resolved_depends(filesystem.packages):
            if p not in packages_visited_set:
                packages_to_eval.append(p)

            # These two form a circular dependency...
            # Don't add them since libc6 has no headers in it.
            if next_package.name.name == 'libgcc-s1' and p.name.name == 'libc6':
                continue

            deps.append(p.name.name)

        if objects:
            objs_to_eval += objects

        hdrs.sort()
        deps.sort()
        hdrs = [f' "{h[1:]}",\n' for h in hdrs]
        hdrs_files = ''.join(hdrs)
        deps_joined = ''.join([f' ":{d}-headers",\n' for d in deps])

        filegroup_srcs = ''.join(
            [f' "{f[1:]}",\n' for f in next_package.files] +
            [f' ":{d}-filegroup",\n' for d in deps])

        rules.append(
            f'filegroup(\n name = "{next_package.name.name}-filegroup",\n srcs = [\n{filegroup_srcs} ],\n)'
        )
        rules.append(
            f'cc_library(\n name = "{next_package.name.name}-headers",\n hdrs = [\n{hdrs_files} ],\n visibility = ["//visibility:public"],\n deps = [\n{deps_joined} ],\n)'
        )

    skip_set = set()
    # These two are linker scripts.  Since they are soooo deep in the
    # hierarchy, let's not stress parsing them correctly.
    skip_set.add('/usr/lib/aarch64-linux-gnu/libc.so')
    skip_set.add('/usr/lib/gcc/aarch64-linux-gnu/12/libgcc_s.so')

    obj_set = set()
    obj_set.update(skip_set)

    while objs_to_eval:
        obj = objs_to_eval.pop()
        if obj in obj_set:
            continue
        obj_set.add(obj)

        deps = filesystem.object_dependencies(obj)
        resolved_deps = []
        for d in deps:
            resolved_obj = filesystem.resolve_object(d, requesting_obj=obj)
            resolved_deps.append(resolved_obj)
            if resolved_obj not in obj_set:
                objs_to_eval.append(resolved_obj)

        resolved_deps.sort()
        rule_name = obj[1:].replace('/', '_')
        # Bug fix: the original built these lines with
        # ' ":%s",\n'.format(...) -- str.format() substitutes {} fields, not
        # %s, so every object rule depended on the literal label ":%s".
        # Substitute the dependency's rule name properly instead.
        dep_rule_names = [
            d[1:].replace('/', '_') for d in resolved_deps if d not in skip_set
        ]
        rule_deps = ''.join([f' ":{name}",\n' for name in dep_rule_names])
        rules.append(
            f'cc_library(\n name = "{rule_name}",\n srcs = ["{obj[1:]}"],\n deps = [\n{rule_deps} ],\n)'
        )

    # Include directories which the toolchain already provides; pkg-config
    # -I flags pointing here are dropped rather than emitted.
    standard_includes = set()
    standard_includes.add('/usr/include')
    standard_includes.add('/usr/include/aarch64-linux-gnu')
    # NOTE(review): Debian's amd64 multiarch dir is spelled
    # x86_64-linux-gnu (underscore); this dashed entry never matches.
    # Kept as-is -- confirm intent.
    standard_includes.add('/usr/include/x86-64-linux-gnu')
    for pkg in filesystem.pkgcfg:
        try:
            contents = filesystem.pkgcfg[pkg]
            # Turn -lfoo into the resolved path of libfoo.so inside the
            # rootfs; raises FileNotFoundError if it can't be located.
            resolved_libraries = [
                filesystem.resolve_object('lib' + f.removeprefix('-l') + '.so')
                for f in contents.libs if f.startswith('-l')
            ]

            # Only emit rules for packages reached by the walk above.
            if contents.package not in packages_visited_set:
                continue

            includes = []
            for flag in contents.cflags:
                if flag.startswith('-I/') and flag.removeprefix(
                        '-I') not in standard_includes:
                    includes.append(flag.removeprefix('-I/'))

            rule_deps = ''.join(
                sorted([
                    ' ":' + l[1:].replace('/', '_') + '",\n'
                    for l in resolved_libraries
                ] + [f' ":{contents.package.name.name}-headers",\n']))
            includes.sort()
            if len(includes) > 0:
                includes_string = ' includes = ["' + '", "'.join(
                    includes) + '"],\n'
            else:
                includes_string = ''
            rules.append(
                f'cc_library(\n name = "{pkg}",\n{includes_string} visibility = ["//visibility:public"],\n deps = [\n{rule_deps} ],\n)'
            )
        except FileNotFoundError:
            # Best effort: skip pkg-config entries whose libraries we can't
            # resolve inside the rootfs.
            print('Failed to instantiate package', repr(pkg))

    # Now, we want to figure out what the dependencies of opencv-dev are.
    # Generate the dependency tree starting from an initial list of packages.

    # Then, figure out how to link the .so's in.  Sometimes, multiple
    # libraries exist per .deb, one target for all?

    with open("orin_debian_rootfs.BUILD.template", "r") as file:
        template = jinja2.Template(file.read())

    substitutions = {
        "SYSROOT_SRCS": """glob(
    include = [
        "include/**",
        "lib/**",
        "lib64/**",
        "usr/include/**",
        "usr/local/**",
        "usr/lib/**",
        "usr/lib64/**",
    ],
    exclude = [
        "usr/share/**",
    ],
)""",
        "RULES": '\n\n'.join(rules),
    }

    with open("../../compilers/orin_debian_rootfs.BUILD", "w") as file:
        file.write(template.render(substitutions))
| 845 | |
| 846 | |
def do_package(partition):
    """Packages the mounted rootfs into a reproducible, date-stamped tarball.

    Creates ./YYYY-MM-DD-bookworm-arm64-nvidia-rootfs.tar from *partition*,
    then downloads the host nvidia-cuda-toolkit .deb and appends its ptxas
    and fatbinary binaries at the paths clang expects, and finally prints
    the tarball's sha256.

    Args:
        partition: Path to the mounted rootfs partition to package up.
    """
    tarball = datetime.date.today().strftime(
        f"{os.getcwd()}/%Y-%m-%d-bookworm-arm64-nvidia-rootfs.tar")
    print(tarball)

    # --sort/--mtime/--owner/--group/--numeric-owner make the archive
    # byte-reproducible; the excludes drop host-only and space-wasting trees.
    subprocess.run([
        "sudo",
        "tar",
        "--sort=name",
        "--mtime=0",
        "--owner=0",
        "--group=0",
        "--numeric-owner",
        "--exclude=./usr/share/ca-certificates",
        "--exclude=./home",
        "--exclude=./root",
        "--exclude=./usr/src",
        "--exclude=./usr/lib/mesa-diverted",
        "--exclude=./usr/bin/X11",
        "--exclude=./usr/lib/systemd/system/system-systemd*cryptsetup.slice",
        "--exclude=./dev",
        "--exclude=./usr/local/cuda-11.8/bin/fatbinary",
        "--exclude=./usr/local/cuda-11.8/bin/ptxas",
        "--exclude=./usr/local/cuda-11.8/include/thrust",
        "--exclude=./usr/local/cuda-11.8/include/nv",
        "--exclude=./usr/local/cuda-11.8/include/cuda",
        "--exclude=./usr/local/cuda-11.8/include/cub",
        "--exclude=./usr/include/cub",
        "--exclude=./usr/include/nv",
        "--exclude=./usr/include/thrust",
        "--exclude=./usr/include/cuda",
        "--exclude=./usr/share",
        "-cf",
        tarball,
        ".",
    ],
                   cwd=partition,
                   check=True)

    # Pack ptxas and fatbinary into the spots that clang expect them to make compiling easy.
    nvidia_cuda_toolkit_path = 'nvidia-cuda-toolkit'
    if not os.path.exists(nvidia_cuda_toolkit_path):
        os.mkdir(nvidia_cuda_toolkit_path)

    subprocess.run(['apt-get', 'download', 'nvidia-cuda-toolkit'],
                   cwd=nvidia_cuda_toolkit_path,
                   check=True)

    # Pick the downloaded .deb explicitly.  On re-runs this directory also
    # contains the previously extracted tree (usr/ etc.), so a bare
    # os.listdir(...)[0] could hand dpkg a non-.deb entry in arbitrary order.
    debs = sorted(name for name in os.listdir(nvidia_cuda_toolkit_path)
                  if name.endswith('.deb'))
    subprocess.run(['dpkg', '-x', debs[0], '.'],
                   cwd=nvidia_cuda_toolkit_path,
                   check=True)

    # Append the two binaries, renaming them into the cuda-11.8 layout via
    # --transform so the sysroot matches what the toolchain looks up.
    subprocess.run([
        "sudo", "tar", "--sort=name", "--mtime=0", "--owner=0", "--group=0",
        "--numeric-owner",
        '--transform=s|usr/bin/ptxas|usr/local/cuda-11.8/bin/ptxas|',
        '--transform=s|usr/bin/fatbinary|usr/local/cuda-11.8/bin/aarch64-unknown-linux-gnu-fatbinary|',
        "--append", "-f", tarball, "usr/bin/fatbinary", "usr/bin/ptxas"
    ],
                   cwd=nvidia_cuda_toolkit_path,
                   check=True)

    subprocess.run(["sha256sum", tarball], check=True)
| 912 | |
| 913 | |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 914 | def main(): |
| 915 | check_required_deps(REQUIRED_DEPS) |
| 916 | |
| 917 | new_image = not os.path.exists(IMAGE) |
| 918 | if new_image: |
| 919 | make_image(IMAGE) |
| 920 | |
| 921 | with scoped_mount(IMAGE) as partition: |
| 922 | if new_image: |
| 923 | subprocess.run([ |
| 924 | "sudo", "debootstrap", "--arch=arm64", "--no-check-gpg", |
| 925 | "--foreign", "bookworm", partition, |
| 926 | "http://deb.debian.org/debian/" |
| 927 | ], |
| 928 | check=True) |
| 929 | |
| 930 | subprocess.run([ |
| 931 | "sudo", "cp", "/usr/bin/qemu-aarch64-static", |
| 932 | f"{partition}/usr/bin/" |
| 933 | ], |
| 934 | check=True) |
| 935 | |
| 936 | global PARTITION |
| 937 | PARTITION = partition |
| 938 | |
| 939 | if new_image: |
| 940 | target(["/debootstrap/debootstrap", "--second-stage"]) |
| 941 | |
| 942 | target([ |
| 943 | "useradd", "-m", "-p", |
| 944 | '$y$j9T$85lzhdky63CTj.two7Zj20$pVY53UR0VebErMlm8peyrEjmxeiRw/rfXfx..9.xet1', |
| 945 | '-s', '/bin/bash', 'pi' |
| 946 | ]) |
| 947 | target(["addgroup", "debug"]) |
| 948 | target(["addgroup", "crypto"]) |
| 949 | target(["addgroup", "trusty"]) |
| 950 | |
| 951 | if not os.path.exists( |
| 952 | f"{partition}/etc/apt/sources.list.d/bullseye-backports.list"): |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 953 | copyfile("root:root", "644", |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 954 | "etc/apt/sources.list.d/bullseye-backports.list") |
| 955 | target(["apt-get", "update"]) |
| 956 | |
| 957 | target([ |
| 958 | "apt-get", "-y", "install", "gnupg", "wget", "systemd", |
| 959 | "systemd-resolved", "locales" |
| 960 | ]) |
| 961 | |
| 962 | target(["localedef", "-i", "en_US", "-f", "UTF-8", "en_US.UTF-8"]) |
| 963 | |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 964 | target_mkdir("root:root", "755", "run/systemd") |
| 965 | target_mkdir("systemd-resolve:systemd-resolve", "755", |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 966 | "run/systemd/resolve") |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 967 | copyfile("systemd-resolve:systemd-resolve", "644", |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 968 | "run/systemd/resolve/stub-resolv.conf") |
| 969 | target(["systemctl", "enable", "systemd-resolved"]) |
| 970 | |
| 971 | target([ |
| 972 | "apt-get", "-y", "install", "bpfcc-tools", "sudo", |
| 973 | "openssh-server", "python3", "bash-completion", "git", "v4l-utils", |
| 974 | "cpufrequtils", "pmount", "rsync", "vim-nox", "chrony", |
| 975 | "libopencv-calib3d406", "libopencv-contrib406", |
| 976 | "libopencv-core406", "libopencv-features2d406", |
| 977 | "libopencv-flann406", "libopencv-highgui406", |
| 978 | "libopencv-imgcodecs406", "libopencv-imgproc406", |
| 979 | "libopencv-ml406", "libopencv-objdetect406", "libopencv-photo406", |
| 980 | "libopencv-shape406", "libopencv-stitching406", |
| 981 | "libopencv-superres406", "libopencv-video406", |
| 982 | "libopencv-videoio406", "libopencv-videostab406", |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 983 | "libopencv-viz406", "libopencv-dev", "libnice10", "pmount", |
| 984 | "libnice-dev", "feh", "libgstreamer1.0-0", |
| 985 | "libgstreamer-plugins-base1.0-0", "libgstreamer-plugins-bad1.0-0", |
| 986 | "gstreamer1.0-plugins-base", "gstreamer1.0-plugins-good", |
| 987 | "gstreamer1.0-plugins-bad", "gstreamer1.0-plugins-ugly", |
| 988 | "gstreamer1.0-nice", "usbutils", "locales", "trace-cmd", "clinfo", |
| 989 | "jq", "strace", "sysstat", "lm-sensors", "can-utils", "xfsprogs", |
| 990 | "gstreamer1.0-tools", "bridge-utils", "net-tools", "apt-file", |
Austin Schuh | 6243c76 | 2023-12-24 14:42:45 -0800 | [diff] [blame] | 991 | "parted", "xxd", "libv4l-dev", "file", "pkexec", "libxkbfile1", |
| 992 | "gdb" |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 993 | ]) |
| 994 | target(["apt-get", "clean"]) |
| 995 | |
| 996 | target(["usermod", "-a", "-G", "sudo", "pi"]) |
| 997 | target(["usermod", "-a", "-G", "video", "pi"]) |
| 998 | target(["usermod", "-a", "-G", "systemd-journal", "pi"]) |
| 999 | target(["usermod", "-a", "-G", "dialout", "pi"]) |
| 1000 | |
| 1001 | virtual_packages = [ |
| 1002 | 'libglib-2.0-0', 'libglvnd', 'libgtk-3-0', 'libxcb-glx', 'wayland' |
| 1003 | ] |
| 1004 | |
| 1005 | install_virtual_packages(virtual_packages) |
| 1006 | |
| 1007 | yocto_package_names = [ |
Austin Schuh | 86d980e | 2023-10-20 22:44:47 -0700 | [diff] [blame] | 1008 | 'tegra-argus-daemon', |
| 1009 | 'tegra-firmware', |
| 1010 | 'tegra-firmware-tegra234', |
| 1011 | 'tegra-firmware-vic', |
| 1012 | 'tegra-firmware-xusb', |
| 1013 | 'tegra-libraries-argus-daemon-base', |
| 1014 | 'tegra-libraries-camera', |
| 1015 | 'tegra-libraries-core', |
| 1016 | 'tegra-libraries-cuda', |
| 1017 | 'tegra-libraries-eglcore', |
| 1018 | 'tegra-libraries-glescore', |
| 1019 | 'tegra-libraries-glxcore', |
| 1020 | 'tegra-libraries-multimedia', |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1021 | 'tegra-libraries-multimedia-utils', |
Austin Schuh | 86d980e | 2023-10-20 22:44:47 -0700 | [diff] [blame] | 1022 | 'tegra-libraries-multimedia-v4l', |
| 1023 | 'tegra-libraries-nvsci', |
| 1024 | 'tegra-libraries-vulkan', |
| 1025 | 'tegra-nvphs', |
| 1026 | 'tegra-nvphs-base', |
| 1027 | 'libnvidia-egl-wayland1', |
| 1028 | 'tegra-mmapi', |
| 1029 | 'tegra-mmapi-dev', |
| 1030 | 'cuda-cudart-11-8', |
| 1031 | 'cuda-cudart-11-8-dev', |
| 1032 | 'cuda-cudart-11-8-stubs', |
| 1033 | 'libcurand-11-8', |
| 1034 | 'libcurand-11-8-dev', |
| 1035 | 'libcurand-11-8-stubs', |
| 1036 | 'cuda-nvcc-11-8', |
| 1037 | 'tegra-cmake-overrides', |
| 1038 | 'cuda-target-environment', |
| 1039 | 'libnpp-11-8', |
| 1040 | 'libnpp-11-8-stubs', |
| 1041 | 'libnpp-11-8-dev', |
| 1042 | 'cuda-cccl-11-8', |
| 1043 | 'cuda-nvcc-11-8', |
| 1044 | 'cuda-nvcc-headers-11-8', |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 1045 | 'nsight-systems-cli', |
| 1046 | 'nsight-systems-cli-qdstrmimporter', |
Austin Schuh | 6243c76 | 2023-12-24 14:42:45 -0800 | [diff] [blame] | 1047 | 'tegra-tools-jetson-clocks', |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1048 | ] |
| 1049 | yocto_packages = list_yocto_packages() |
| 1050 | packages = list_packages() |
| 1051 | |
| 1052 | install_packages([ |
| 1053 | package for package in yocto_packages |
| 1054 | if package.name in yocto_package_names |
| 1055 | ], packages) |
| 1056 | |
| 1057 | # Now, install the kernel and modules after all the normal packages are in. |
| 1058 | yocto_packages_to_install = [ |
| 1059 | package for package in yocto_packages |
| 1060 | if (package.name.startswith('kernel-module-') or package.name. |
| 1061 | startswith('kernel-5.10') or package.name == 'kernel-modules') |
| 1062 | ] |
| 1063 | |
| 1064 | packages_to_remove = [] |
| 1065 | |
| 1066 | # Remove kernel-module-* packages + kernel- package. |
| 1067 | for key in packages: |
| 1068 | if key.startswith('kernel-module') or key.startswith( |
| 1069 | 'kernel-5.10'): |
| 1070 | already_installed = False |
| 1071 | for index, yocto_package in enumerate( |
| 1072 | yocto_packages_to_install): |
| 1073 | if key == yocto_package.name and packages[ |
| 1074 | key] == yocto_package.version: |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1075 | already_installed = True |
| 1076 | del yocto_packages_to_install[index] |
| 1077 | break |
| 1078 | if not already_installed: |
| 1079 | packages_to_remove.append(key) |
| 1080 | |
| 1081 | print("Removing", packages_to_remove) |
| 1082 | if len(packages_to_remove) > 0: |
| 1083 | target(['dpkg', '--purge'] + packages_to_remove) |
| 1084 | print("Installing", |
| 1085 | [package.name for package in yocto_packages_to_install]) |
| 1086 | |
| 1087 | install_packages(yocto_packages_to_install, packages) |
| 1088 | |
| 1089 | target(["systemctl", "enable", "nvargus-daemon.service"]) |
| 1090 | |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 1091 | copyfile("root:root", "644", "etc/sysctl.d/sctp.conf") |
| 1092 | copyfile("root:root", "644", "etc/systemd/logind.conf") |
| 1093 | copyfile("root:root", "555", |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1094 | "etc/bash_completion.d/aos_dump_autocomplete") |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 1095 | copyfile("root:root", "644", "etc/security/limits.d/rt.conf") |
| 1096 | copyfile("root:root", "644", "etc/systemd/system/usb-mount@.service") |
| 1097 | copyfile("root:root", "644", "etc/chrony/chrony.conf") |
| 1098 | target_mkdir("root:root", "700", "root/bin") |
| 1099 | target_mkdir("pi:pi", "755", "home/pi/.ssh") |
| 1100 | copyfile("pi:pi", "600", "home/pi/.ssh/authorized_keys") |
| 1101 | target_mkdir("root:root", "700", "root/bin") |
| 1102 | copyfile("root:root", "644", "etc/systemd/system/grow-rootfs.service") |
Austin Schuh | 6243c76 | 2023-12-24 14:42:45 -0800 | [diff] [blame] | 1103 | copyfile("root:root", "644", |
| 1104 | "etc/systemd/system/jetson-clocks.service") |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 1105 | copyfile("root:root", "500", "root/bin/change_hostname.sh") |
| 1106 | copyfile("root:root", "700", "root/trace.sh") |
| 1107 | copyfile("root:root", "440", "etc/sudoers") |
| 1108 | copyfile("root:root", "644", "etc/fstab") |
| 1109 | copyfile("root:root", "644", |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1110 | "var/nvidia/nvcam/settings/camera_overrides.isp") |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 1111 | copyfile("root.root", "644", "/etc/ld.so.conf.d/yocto.conf") |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1112 | |
Jim Ostrowski | 12ef0ff | 2023-10-22 23:20:20 -0700 | [diff] [blame] | 1113 | target_mkdir("root:root", "755", "etc/systemd/network") |
| 1114 | copyfile("root:root", "644", "etc/systemd/network/eth0.network") |
| 1115 | copyfile("root:root", "644", "etc/systemd/network/80-can.network") |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 1116 | copyfile("root:root", "644", "etc/udev/rules.d/nvidia.rules") |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1117 | target(["/root/bin/change_hostname.sh", "pi-971-1"]) |
| 1118 | |
| 1119 | target(["systemctl", "enable", "systemd-networkd"]) |
| 1120 | target(["systemctl", "enable", "grow-rootfs"]) |
Austin Schuh | 6243c76 | 2023-12-24 14:42:45 -0800 | [diff] [blame] | 1121 | target(["systemctl", "enable", "jetson-clocks"]) |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 1122 | |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1123 | target(["apt-file", "update"]) |
| 1124 | |
| 1125 | target(["ldconfig"]) |
| 1126 | |
| 1127 | if not os.path.exists(f"{partition}/home/pi/.dotfiles"): |
| 1128 | pi_target_unescaped( |
| 1129 | "cd /home/pi/ && git clone --separate-git-dir=/home/pi/.dotfiles https://github.com/AustinSchuh/.dotfiles.git tmpdotfiles && rsync --recursive --verbose --exclude .git tmpdotfiles/ /home/pi/ && rm -r tmpdotfiles && git --git-dir=/home/pi/.dotfiles/ --work-tree=/home/pi/ config --local status.showUntrackedFiles no" |
| 1130 | ) |
| 1131 | pi_target(["vim", "-c", "\":qa!\""]) |
| 1132 | |
| 1133 | target_unescaped( |
| 1134 | "cd /root/ && git clone --separate-git-dir=/root/.dotfiles https://github.com/AustinSchuh/.dotfiles.git tmpdotfiles && rsync --recursive --verbose --exclude .git tmpdotfiles/ /root/ && rm -r tmpdotfiles && git --git-dir=/root/.dotfiles/ --work-tree=/root/ config --local status.showUntrackedFiles no" |
| 1135 | ) |
| 1136 | target(["vim", "-c", "\":qa!\""]) |
| 1137 | |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 1138 | generate_build_file(partition) |
Austin Schuh | 86d980e | 2023-10-20 22:44:47 -0700 | [diff] [blame] | 1139 | |
Austin Schuh | bffbe8b | 2023-11-22 21:32:05 -0800 | [diff] [blame] | 1140 | do_package(partition) |
Austin Schuh | 86d980e | 2023-10-20 22:44:47 -0700 | [diff] [blame] | 1141 | |
Austin Schuh | 5101483 | 2023-10-20 17:44:45 -0700 | [diff] [blame] | 1142 | |
# Entry-point guard: allow importing this module without kicking off a build.
if __name__ == '__main__':
    main()