#!/usr/bin/python3
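"""Builds an arm64 Debian bookworm root filesystem image for the NVIDIA Orin.

Roughly: create (or reuse) an xfs image, debootstrap Debian bookworm into it,
install the Debian and Yocto (meta-frc971) packages we need, copy in our
config files, generate a Bazel sysroot BUILD file from the resulting
filesystem, and finally package everything up as a tarball.
"""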

from __future__ import annotations
import apt_pkg
import collections
import contextlib
import datetime
import functools
import jinja2
import os
import pathlib
import re
import shlex
import shutil
import subprocess

# Name of debian image to be created/modified
IMAGE = "arm64_bookworm_debian_yocto.img"

# Path to yocto build for the orin (using meta-frc971)
YOCTO = "/home/austin/local/jetpack/robot-yocto/build"

REQUIRED_DEPS = ["debootstrap", "u-boot-tools", "xfsprogs"]

apt_pkg.init_system()
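# apt_pkg needs to be initialized before NameVersion.matches() below can use
# apt_pkg.version_compare for Debian version comparisons.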


@contextlib.contextmanager
def scoped_loopback(image):
    """Mounts an image as a loop back device."""
    result = subprocess.run(["sudo", "losetup", "--show", "-f", image],
                            check=True,
                            stdout=subprocess.PIPE)
    device = result.stdout.decode('utf-8').strip()
    print("Mounted", image, "to", repr(device))
    try:
        yield device
    finally:
        subprocess.run(["sudo", "losetup", "-d", device], check=True)


@contextlib.contextmanager
def scoped_mount(image):
    """Mounts an image as a partition."""
    partition = f"{image}.partition"
    try:
        os.mkdir(partition)
    except FileExistsError:
        pass

    result = subprocess.run(["sudo", "mount", "-o", "loop", image, partition],
                            check=True)

    try:
        yield partition
    finally:
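        # main() copies qemu-aarch64-static into the image so we can chroot
        # into it; remove it again on the way out so it doesn't end up in the
        # packaged rootfs.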
        subprocess.run(
            ["sudo", "rm", f"{partition}/usr/bin/qemu-aarch64-static"])
        subprocess.run(["sudo", "umount", partition], check=True)


def check_required_deps(deps):
    """Checks if the provided list of dependencies is installed."""
    missing_deps = []
    for dep in deps:
        # dpkg-query exits non-zero for packages it doesn't know about, so
        # don't use check=True here; just inspect the output.
        result = subprocess.run(["dpkg-query", "-W", "-f='${Status}'", dep],
                                check=False,
                                stdout=subprocess.PIPE)

        if "install ok installed" not in result.stdout.decode('utf-8'):
            missing_deps.append(dep)

    if len(missing_deps) > 0:
        print("Missing dependencies, please install:")
        print("sudo apt-get install", " ".join(missing_deps))
        exit()


def make_image(image):
    """Makes an image and creates an xfs filesystem on it."""
    print("Creating image ", f"{image}")
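    # dd with count=0 and a large seek creates an 8 GiB sparse file without
    # writing 8 GiB of zeros.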
    result = subprocess.run([
        "dd", "if=/dev/zero", f"of={image}", "bs=1", "count=0",
        "seek=8589934592"
    ],
                            check=True)

    with scoped_loopback(image) as loopback:
        subprocess.run([
            "sudo", "mkfs.xfs", "-d", "su=128k", "-d", "sw=1", "-L", "rootfs",
            loopback
        ],
                       check=True)


def target_unescaped(cmd):
    """Runs a command as root with bash -c cmd, i.e. without escaping."""
    subprocess.run([
        "sudo", "chroot", "--userspec=0:0", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c", cmd
    ],
                   check=True)


def target(cmd):
    """Runs a command as root with escaping."""
    # shlex.join quotes each argument itself; quoting twice would mangle
    # arguments containing shell metacharacters.
    target_unescaped(shlex.join(cmd))


def pi_target_unescaped(cmd):
    """Runs a command as pi with bash -c cmd, i.e. without escaping."""
    subprocess.run([
        "sudo", "chroot", "--userspec=pi:pi", "--groups=pi", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c", cmd
    ],
                   check=True)


def pi_target(cmd):
    """Runs a command as pi with escaping."""
    pi_target_unescaped(shlex.join(cmd))


def copyfile(owner, permissions, file):
    """Copies a file from contents/{file} with the provided owner and permissions."""
    print("copyfile", owner, permissions, file)
    subprocess.run(["sudo", "cp", f"contents/{file}", f"{PARTITION}/{file}"],
                   check=True)
    subprocess.run(["sudo", "chmod", permissions, f"{PARTITION}/{file}"],
                   check=True)
    target(["chown", owner, f"/{file}"])


def target_mkdir(owner_group, permissions, folder):
    """Creates a directory recursively with the provided permissions and ownership."""
    print("target_mkdir", owner_group, permissions, folder)
    owner, group = owner_group.split(':')
    target(
        ["install", "-d", "-m", permissions, "-o", owner, "-g", group, folder])


def list_packages():
    """Lists all installed packages.

    Returns:
      A dictionary with keys as packages, and values as versions.
    """
    result = subprocess.run([
        "sudo", "chroot", "--userspec=0:0", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c",
        "dpkg-query -W -f='${Package} ${Version}\n'"
    ],
                            check=True,
                            stdout=subprocess.PIPE)

    r = {}
    for line in result.stdout.decode('utf-8').strip().split('\n'):
        package, version = line.split(' ')
        r[package] = version

    return r


def list_yocto_packages():
    """Lists all packages in the Yocto folder.

    Returns:
      list of Package classes.
    """
    Package = collections.namedtuple(
        'Package', ['path', 'name', 'version', 'architecture'])
    result = []
    pathlist = pathlib.Path(f"{YOCTO}/tmp/deploy/deb").glob('**/*.deb')
    for path in pathlist:
        # Strip off the path, .deb, and split on _ to parse the package info.
        s = os.path.basename(str(path))[:-4].split('_')
        result.append(Package(str(path), s[0], s[1], s[2]))

    return result


def install_packages(new_packages, existing_packages):
    """Installs the provided yocto packages, if they are new."""
    # To install the yocto packages, first copy them into a folder in /tmp,
    # then install them, then clean the folder up.
    target(["mkdir", "-p", "/tmp/yocto_packages"])
    try:
        to_install = []
        for package in new_packages:
            if package.name in existing_packages and existing_packages[
                    package.name] == package.version:
                print('Skipping', package)
                continue

            subprocess.run([
                "sudo", "cp", package.path,
                f"{PARTITION}/tmp/yocto_packages/{os.path.basename(package.path)}"
            ],
                           check=True)
            to_install.append(package)

        if len(to_install) > 0:
            target(["dpkg", "-i"] + [
                f"/tmp/yocto_packages/{os.path.basename(package.path)}"
                for package in to_install
            ])

    finally:
        target(["rm", "-rf", "/tmp/yocto_packages"])


def install_virtual_packages(virtual_packages):
    """Builds and installs the provided virtual packages."""
    try:
        target(["mkdir", "-p", "/tmp/yocto_packages"])
        for virtual_package in virtual_packages:
            subprocess.run(
                ["dpkg-deb", "--build", f"virtual_packages/{virtual_package}"],
                check=True)
            subprocess.run([
                "sudo", "cp", f"virtual_packages/{virtual_package}.deb",
                f"{PARTITION}/tmp/yocto_packages/{virtual_package}.deb"
            ],
                           check=True)

        target(["dpkg", "-i"] + [
            f"/tmp/yocto_packages/{package}.deb"
            for package in virtual_packages
        ])

        for virtual_package in virtual_packages:
            subprocess.run(["rm", f"virtual_packages/{virtual_package}.deb"],
                           check=True)

    finally:
        target(["rm", "-rf", "/tmp/yocto_packages"])


class NameVersion:
    """Class representing a package name and optionally a version constraint."""

    def __init__(self, nameversion: str):
        # We are processing package names here like:
        #   python3:any
        #   python3-markdown (= 3.4.1-2)
        if '(' in nameversion:
            s = nameversion.split(' (')
            self.name = s[0].strip()

            v = s[1][:-1].split(' ')

            self.operator = v[0]
            self.version = v[1]
        else:
            self.name = nameversion.strip()
            self.operator = None
            self.version = None

        # Rip off :amd64 or :aarch64 from the name if it is here.
        if ':' in self.name:
            self.name = self.name.split(':')[0]

    def matches(self, other: NameVersion) -> bool:
        """If self meets the requirements defined by other."""
        if other.name != self.name:
            return False

        if other.operator is None:
            return True

        vc = apt_pkg.version_compare(self.version, other.version)
        if vc < 0:
            return other.operator in ('<=', '<<')
        elif vc == 0:
            return other.operator in ('=', '>=', '<=')
        elif vc > 0:
            return other.operator in ('>=', '>>')

    def __repr__(self) -> str:
        if self.operator is not None:
            return f"NameVersion({self.name} ({self.operator} {self.version}))"
        else:
            return f"NameVersion({self.name})"


class Package:

    def __init__(self, name: str, provides: str, version: str, depends: str,
                 files: list[str]):
        self.name = NameVersion(f"{name} (= {version})")

        self.provides = [self.name]

        if provides:
            for package_and_version in provides.split(","):
                self.provides.append(NameVersion(package_and_version))

        self.depends = []
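        # A Depends field like
        #   "libc6 (>= 2.34), debconf (>= 0.5) | debconf-2.0"
        # parses into [[libc6 (>= 2.34)], [debconf (>= 0.5), debconf-2.0]]:
        # a list of alternative groups, where any one entry satisfies its
        # group.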
        if depends:
            for package_and_version in depends.split(", "):
                if ' | ' in package_and_version:
                    oneof = []
                    for oneof_package_and_version in package_and_version.split(
                            ' | '):
                        oneof.append(NameVersion(oneof_package_and_version))
                    self.depends.append(oneof)
                else:
                    self.depends.append([NameVersion(package_and_version)])

        self.files = files

    def update_filetypes(self, directories: set[str], symlinks: dict[str,
                                                                     str]):
        if hasattr(self, 'directories') or hasattr(self, 'symlinks'):
            return

        self.directories = []
        self.symlinks = dict()
        files = []
        for f in self.files:
            if f in directories:
                self.directories.append(f)
            elif f in symlinks:
                self.symlinks[f] = symlinks[f]
            else:
                files.append(f)

        self.files = files

    def matches(self, other: NameVersion) -> bool:
        """If self meets the requirements defined by other."""
        return any(p.matches(other) for p in self.provides)

    def resolved_depends(self,
                         packages: dict[str, Package]) -> list[Package]:
        result = set()

        # The dependencies are lists of lists of dependencies. At least one
        # element from each inner list needs to match for it to be valid. Most
        # of the dependencies are going to be a single element list.
        for p_or_list in self.depends:
            resolved_set = set()
            for oneof_package in p_or_list:
                if oneof_package.name not in packages:
                    continue

                resolved_oneof_package = packages[oneof_package.name]
                if resolved_oneof_package.matches(oneof_package):
                    resolved_set.add(resolved_oneof_package)

            if len(resolved_set) == 0:
                raise RuntimeError(
                    f"Failed to find dependencies for {p_or_list}: {repr(self)}"
                )

            result.update(resolved_set)

        return sorted(list(result), key=lambda x: x.name.name)

    def headers(self) -> list[str]:
        return [h for h in self.files if h.startswith('/usr/include')]

    def objects(self) -> list[str]:
        result = []
        for file in self.files:
            if not file.startswith('/usr'):
                continue

            # Gotta love GDB extensions ...libc.so....py. Ignore them.
            if file.endswith('.py'):
                continue

            # We want to find things like libfoo.so.1.2.3.4.5. The .so needs
            # to be last.
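            # e.g. "libfoo.so.1.2" peels off ".2", then ".1", then ".so"
            # (setting found_so), and stops at the empty extension.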
            opath = file
            found_so = False
            while True:
                opath, ext = os.path.splitext(opath)
                if ext == '':
                    break
                elif ext == '.so':
                    found_so = True
                else:
                    found_so = False

            if found_so:
                result.append(file)

        return sorted(result)

    def __repr__(self) -> str:
        return f"{{ {repr(self.provides[0])}, \"provides\": {repr(self.provides[1:])}, \"depends\": {repr(self.depends)} }}"


class PkgConfig:

    def __init__(self, contents, package):
        # The pkgconfig file format lets you specify variables and then expand
        # them into the various fields. These are in the form
        #   asdf=15234
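        # A .pc file typically looks something like:
        #   prefix=/usr
        #   Name: libfoo
        #   Version: 1.2.3
        #   Cflags: -I${prefix}/include/foo
        #   Libs: -L${prefix}/lib -lfoo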
        self.variables = dict()

        self.package = package
        self.name = None
        self.libs = []
        self.cflags = []
        for line in contents.split('\n'):
            line = line.strip()
            # Parse everything so we learn if a new field shows up we don't
            # know how to parse.
            if line == '':
                pass
            elif line[0] == '#':
                pass
            elif line.startswith('Name:'):
                self.name = self.expand(line.removeprefix('Name:').strip())
            elif line.startswith('Description:'):
                self.description = self.expand(
                    line.removeprefix('Description:').strip())
            elif line.startswith('Version:'):
                self.version = self.expand(
                    line.removeprefix('Version:').strip())
            elif line.startswith('Libs:'):
                self.libs = self.expand(
                    line.removeprefix('Libs:').strip()).split()
            elif line.startswith('Cflags:'):
                self.cflags = self.expand(
                    line.removeprefix('Cflags:').strip()).split()
            elif line.startswith('URL:'):
                pass
            elif line.startswith('Cflags.private:'):
                pass
            elif line.startswith('Requires:'):
                pass
            elif line.startswith('Requires.private:'):
                pass
            elif line.startswith('Libs.private:'):
                pass
            elif line.startswith('Conflicts:'):
                pass
            elif re.match('^[-a-zA-Z_0-9]* *=.*$', line):
                split_line = re.split(' *= *', line)
                self.variables[split_line[0]] = self.expand(split_line[1])
            else:
                raise ValueError('Unknown line in pkgconfig file')

        if self.name is None:
            raise RuntimeError("Failed to find Name.")

    def expand(self, line: str) -> str:
        """Expands a string with variable expansions in it like bash (${foo})."""
        for var in self.variables:
            line = line.replace('${' + var + '}', self.variables[var])
        return line


class Filesystem:

    def __init__(self, partition):
        self.partition = partition
        # TODO(austin): I really want to be able to run this on an amd64
        # filesystem too, which won't work with qemu-aarch64-static. Pull it
        # into a library.
        result = subprocess.run([
            "sudo", "chroot", "--userspec=0:0", f"{self.partition}",
            "qemu-aarch64-static", "/bin/bash", "-c",
            "dpkg-query -W -f='Version: ${Version}\nPackage: ${Package}\nProvides: ${Provides}\nDepends: ${Depends}\n${db-fsys:Files}--\n'"
        ],
                                check=True,
                                stdout=subprocess.PIPE)
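        # The format string above emits one block per installed package,
        # roughly:
        #   Version: 1.2.3-1
        #   Package: libfoo1
        #   Provides: libfoo
        #   Depends: libc6 (>= 2.34)
        #    /usr/lib/aarch64-linux-gnu/libfoo.so.1
        #    ...
        #   --
        # which the loop below parses back apart.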

        # Mapping from all package names (str) to their corresponding Package
        # objects for that package.
        self.packages = dict()

        package_in_progress = {'files': []}
        files = set()
        for line in result.stdout.decode('utf-8').strip().split('\n'):
            if line == '--':
                # We found the end-of-package delimiter, save the package and
                # clear everything out.
                new_package = Package(package_in_progress['Package'],
                                      package_in_progress['Provides'],
                                      package_in_progress['Version'],
                                      package_in_progress['Depends'],
                                      package_in_progress['files'])

                for provides in new_package.provides:
                    self.packages[provides.name] = new_package

                # Wipe everything so we detect if any fields are missing.
                package_in_progress = {'files': []}
            elif line.startswith("Version: "):
                package_in_progress['Version'] = line.removeprefix("Version: ")
            elif line.startswith("Package: "):
                package_in_progress['Package'] = line.removeprefix("Package: ")
            elif line.startswith("Provides: "):
                package_in_progress['Provides'] = line.removeprefix(
                    "Provides: ")
            elif line.startswith("Depends: "):
                package_in_progress['Depends'] = line.removeprefix("Depends: ")
            else:
                assert (line.startswith(' '))
                f = line.removeprefix(' ')
                package_in_progress['files'].append(f)
                files.add(f)

        self.directories = set()
        self.symlinks = dict()

        for root, walked_dirs, walked_files in os.walk(self.partition):
            for entry in walked_files + walked_dirs:
                full_target = os.path.join(root, entry)
                if pathlib.Path(full_target).is_symlink():
                    target = full_target.removeprefix(self.partition)
                    self.symlinks[target] = os.readlink(full_target)

        for file in files:
            full_target = f"{self.partition}/{file}"
            try:
                if pathlib.Path(full_target).is_symlink():
                    self.symlinks[file] = os.readlink(full_target)

                if pathlib.Path(full_target).is_dir():
                    self.directories.add(file)
            except PermissionError:
                # Assume it is a file...
                print("Failed to read", file)
                pass

            # Directories are all the things before the last /
            for parent in pathlib.Path(file).parents:
                self.directories.add(parent)

        # Now, populate self.files with a mapping from each file to the owning
        # package so we can do file ownership lookups.
        visited = set()
        self.files = dict()
        for package in self.packages.values():
            if package in visited:
                continue
            visited.add(package)

            for f in package.files:
                if f in self.directories:
                    continue

                if f in self.files:
                    print("Duplicate file", repr(f), ' current', package,
                          ' already', self.files[f])
                    if not f.startswith('/usr/share'):
                        assert (f not in self.files)
                self.files[f] = package

        # For each package, update the file list to track dependencies and
        # symlinks correctly.
        for p in self.packages.values():
            p.update_filetypes(self.directories, self.symlinks)

        # Print out all the libraries and where they live as known to ldconfig
        result = subprocess.run(
            ['ldconfig', '-C', f'{self.partition}/etc/ld.so.cache', '-p'],
            check=True,
            stdout=subprocess.PIPE,
        )
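        # ldconfig -p prints lines like
        #   "\tlibfoo.so.1 (libc6,AArch64) => /usr/lib/aarch64-linux-gnu/libfoo.so.1"
        # which get split into a soname -> path mapping below.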

        self.ldconfig_cache = dict()
        for line in result.stdout.decode('utf-8').split('\n'):
            if line.startswith('\t'):
                split_line = re.split(' \\(libc6,(AArch64|x86-64)\\) => ',
                                      line.strip())
                self.ldconfig_cache[split_line[0]] = split_line[2]

        self.pkgcfg = dict()
        for pkgconfig in [
                '/usr/local/lib/aarch64-linux-gnu/pkgconfig',
                '/usr/local/lib/pkgconfig',
                '/usr/local/share/pkgconfig',
                '/usr/lib/aarch64-linux-gnu/pkgconfig',
                '/usr/lib/pkgconfig',
                '/usr/share/pkgconfig',
        ]:
            candidate_folder = f"{self.partition}/{pkgconfig}"
            if not os.path.exists(candidate_folder):
                continue

            for f in os.listdir(candidate_folder):
                full_filename = f"{candidate_folder}/{f}"
                if pathlib.Path(full_filename).is_dir():
                    continue
                if not f.endswith('.pc'):
                    continue

                package_name = f.removesuffix('.pc')

                with open(f"{candidate_folder}/{f}", "r") as file:
                    self.pkgcfg[package_name] = PkgConfig(
                        file.read(), self.files[f'{pkgconfig}/{f}'])

    def resolve_symlink(self, path: str) -> str:
        """Implements symlink resolution using self.symlinks."""
        # Only need to support absolute links since we don't have a concept of
        # cwd.

        # Implements the symlink algorithm in
        # https://android.googlesource.com/platform/bionic.git/+/android-4.0.1_r1/libc/bionic/realpath.c
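        # For example, with self.symlinks = {'/lib': 'usr/lib'}, resolving
        # "/lib/libfoo.so.1" walks the path component by component and
        # returns "/usr/lib/libfoo.so.1".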
        assert (path[0] == '/')

        left = path.split('/')[1:]

        if len(path) == 0:
            return path

        resolved = ['']

        while len(left) > 0:
            if left[0] == '.':
                left = left[1:]
            elif left[0] == '..':
                assert (len(resolved) >= 1)
                resolved = resolved[:-1]
                left = left[1:]
            else:
                resolved.append(left[0])
                merged = '/'.join(resolved)
                if merged in self.symlinks:
                    symlink = self.symlinks[merged]
                    # Absolute symlink, blow away the previously accumulated
                    # path.
                    if symlink[0] == '/':
                        resolved = ['']
                        left = symlink[1:].split('/') + left[1:]
                    else:
                        # Relative symlink, replace the symlink name in the
                        # path with the newly found target.
                        resolved = resolved[:-1]
                        left = symlink.split('/') + left[1:]
                else:
                    left = left[1:]

        return '/'.join(resolved)

    def exists(self, path: str) -> bool:
        if path in self.files or path in self.symlinks or path in self.directories:
            return True
        return False

    def resolve_object(self,
                       obj: str,
                       requesting_obj: str | None = None) -> str:
        if obj in self.ldconfig_cache:
            return self.resolve_symlink(self.ldconfig_cache[obj])
        elif requesting_obj is not None:
            to_search = os.path.join(os.path.split(requesting_obj)[0], obj)
            if self.exists(to_search):
                return self.resolve_symlink(to_search)

        raise FileNotFoundError(obj)

    @functools.cache
    def object_dependencies(self, obj: str) -> list[str]:
        result = subprocess.run(
            ['objdump', '-p', f'{self.partition}/{obj}'],
            check=True,
            stdout=subprocess.PIPE,
        )

        # Part of the example output. We only want NEEDED from the dynamic
        # section.
        #
        #   RELRO off  0x0000000000128af0 vaddr 0x0000000000128af0 paddr 0x0000000000128af0 align 2**0
        #         filesz 0x0000000000003510 memsz 0x0000000000003510 flags r--
        #
        # Dynamic Section:
        #   NEEDED               libtinfo.so.6
        #   NEEDED               libc.so.6
        #   INIT                 0x000000000002f000
        #   FINI                 0x00000000000efb94

        deps = []
        for line in result.stdout.decode('utf-8').split('\n'):
            if 'NEEDED' in line:
                deps.append(line.strip().split()[1])

        return deps


def generate_build_file(partition):
    filesystem = Filesystem(partition)

    packages_to_eval = [
        filesystem.packages['libopencv-dev'],
        filesystem.packages['libc6-dev'],
        filesystem.packages['libstdc++-12-dev'],
        filesystem.packages['libnpp-11-8-dev'],
    ]

    # Recursively walk the tree using Dijkstra's algorithm to generate targets
    # for each set of headers.
    print('Walking tree for', [p.name.name for p in packages_to_eval])

    rules = []
    objs_to_eval = []

    # Set of packages already generated in case our graph hits a package
    # multiple times.
    packages_visited_set = set()
    while packages_to_eval:
        next_package = packages_to_eval.pop()
        if next_package in packages_visited_set:
            continue
        packages_visited_set.add(next_package)

        hdrs = next_package.headers()
        objects = next_package.objects()

        deps = []
        for p in next_package.resolved_depends(filesystem.packages):
            if p not in packages_visited_set:
                packages_to_eval.append(p)

            # These two form a circular dependency...
            # Don't add them since libc6 has no headers in it.
            if next_package.name.name == 'libgcc-s1' and p.name.name == 'libc6':
                continue

            deps.append(p.name.name)

        if objects:
            objs_to_eval += objects

        hdrs.sort()
        deps.sort()
        hdrs = [f'        "{h[1:]}",\n' for h in hdrs]
        hdrs_files = ''.join(hdrs)
        deps_joined = ''.join([f'        ":{d}-headers",\n' for d in deps])

        filegroup_srcs = ''.join(
            [f'        "{f[1:]}",\n' for f in next_package.files] +
            [f'        ":{d}-filegroup",\n' for d in deps])

        rules.append(
            f'filegroup(\n    name = "{next_package.name.name}-filegroup",\n    srcs = [\n{filegroup_srcs}    ],\n)'
        )
        rules.append(
            f'cc_library(\n    name = "{next_package.name.name}-headers",\n    hdrs = [\n{hdrs_files}    ],\n    visibility = ["//visibility:public"],\n    deps = [\n{deps_joined}    ],\n)'
        )
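        # Each package therefore turns into a pair of rules, roughly:
        #   filegroup(name = "libfoo-filegroup", srcs = [files + dep filegroups])
        #   cc_library(name = "libfoo-headers", hdrs = [...],
        #              deps = [":libbar-headers", ...])
        # so depending on a "-headers" target pulls in this package's headers
        # plus those of everything it depends on.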

    skip_set = set()
    # These two are linker scripts. Since they are soooo deep in the
    # hierarchy, let's not stress parsing them correctly.
    skip_set.add('/usr/lib/aarch64-linux-gnu/libc.so')
    skip_set.add('/usr/lib/gcc/aarch64-linux-gnu/12/libgcc_s.so')

    obj_set = set()
    obj_set.update(skip_set)

    while objs_to_eval:
        obj = objs_to_eval.pop()
        if obj in obj_set:
            continue
        obj_set.add(obj)

        deps = filesystem.object_dependencies(obj)
        resolved_deps = []
        for d in deps:
            resolved_obj = filesystem.resolve_object(d, requesting_obj=obj)
            resolved_deps.append(resolved_obj)
            if resolved_obj not in obj_set:
                objs_to_eval.append(resolved_obj)

        resolved_deps.sort()
        rule_name = obj[1:].replace('/', '_')
        rule_deps = ''.join([
            '        ":{}",\n'.format(d[1:].replace('/', '_'))
            for d in resolved_deps if d not in skip_set
        ])
        rules.append(
            f'cc_library(\n    name = "{rule_name}",\n    srcs = ["{obj[1:]}"],\n    deps = [\n{rule_deps}    ],\n)'
        )

    standard_includes = set()
    standard_includes.add('/usr/include')
    standard_includes.add('/usr/include/aarch64-linux-gnu')
    standard_includes.add('/usr/include/x86-64-linux-gnu')
    for pkg in filesystem.pkgcfg:
        try:
            contents = filesystem.pkgcfg[pkg]
            resolved_libraries = [
                filesystem.resolve_object('lib' + f.removeprefix('-l') + '.so')
                for f in contents.libs if f.startswith('-l')
            ]

            if contents.package not in packages_visited_set:
                continue

            includes = []
            for flag in contents.cflags:
                if flag.startswith('-I/') and flag.removeprefix(
                        '-I') not in standard_includes:
                    includes.append(flag.removeprefix('-I/'))

            rule_deps = ''.join(
                sorted([
                    '        ":' + l[1:].replace('/', '_') + '",\n'
                    for l in resolved_libraries
                ] + [f'        ":{contents.package.name.name}-headers",\n']))
            includes.sort()
            if len(includes) > 0:
                includes_string = '    includes = ["' + '", "'.join(
                    includes) + '"],\n'
            else:
                includes_string = ''
            rules.append(
                f'cc_library(\n    name = "{pkg}",\n{includes_string}    visibility = ["//visibility:public"],\n    deps = [\n{rule_deps}    ],\n)'
            )
            # Look up which package this is from to include the headers
            # Depend on all the libraries
            # Parse -I -> includes
        except FileNotFoundError:
            print('Failed to instantiate package', repr(pkg))
            pass

    # Now, we want to figure out what the dependencies of opencv-dev are.
    # Generate the dependency tree starting from an initial list of packages.

    # Then, figure out how to link the .so's in. Sometimes, multiple libraries
    # exist per .deb, one target for all?

    with open("orin_debian_rootfs.BUILD.template", "r") as file:
        template = jinja2.Template(file.read())

    substitutions = {
        "SYSROOT_SRCS": """glob(
    include = [
        "include/**",
        "lib/**",
        "lib64/**",
        "usr/include/**",
        "usr/local/**",
        "usr/lib/**",
        "usr/lib64/**",
    ],
    exclude = [
        "usr/share/**",
    ],
)""",
        "RULES": '\n\n'.join(rules),
    }

    with open("../../compilers/orin_debian_rootfs.BUILD", "w") as file:
        file.write(template.render(substitutions))

    subprocess.run(['buildifier', "../../compilers/orin_debian_rootfs.BUILD"])


def do_package(partition):
    tarball = datetime.date.today().strftime(
        f"{os.getcwd()}/%Y-%m-%d-bookworm-arm64-nvidia-rootfs.tar")
    print(tarball)

    subprocess.run([
        "sudo",
        "tar",
        "--sort=name",
        "--mtime=0",
        "--owner=0",
        "--group=0",
        "--numeric-owner",
        "--exclude=./usr/share/ca-certificates",
        "--exclude=./home",
        "--exclude=./root",
        "--exclude=./usr/src",
        "--exclude=./usr/lib/mesa-diverted",
        "--exclude=./usr/bin/X11",
        "--exclude=./usr/lib/systemd/system/system-systemd*cryptsetup.slice",
        "--exclude=./dev",
        "--exclude=./usr/local/cuda-11.8/bin/fatbinary",
        "--exclude=./usr/local/cuda-11.8/bin/ptxas",
        "--exclude=./usr/local/cuda-11.8/include/thrust",
        "--exclude=./usr/local/cuda-11.8/include/nv",
        "--exclude=./usr/local/cuda-11.8/include/cuda",
        "--exclude=./usr/local/cuda-11.8/include/cub",
        "--exclude=./usr/include/cub",
        "--exclude=./usr/include/nv",
        "--exclude=./usr/include/thrust",
        "--exclude=./usr/include/cuda",
        "--exclude=./usr/share",
        "-cf",
        tarball,
        ".",
    ],
                   cwd=partition,
                   check=True)

    # Pack ptxas and fatbinary into the spots where clang expects them, to
    # make compiling easy.
    nvidia_cuda_toolkit_path = 'nvidia-cuda-toolkit'
    if not os.path.exists(nvidia_cuda_toolkit_path):
        os.mkdir(nvidia_cuda_toolkit_path)

    subprocess.run(['apt-get', 'download', 'nvidia-cuda-toolkit'],
                   cwd=nvidia_cuda_toolkit_path,
                   check=True)

    subprocess.run(
        ['dpkg', '-x',
         os.listdir(nvidia_cuda_toolkit_path)[0], '.'],
        cwd=nvidia_cuda_toolkit_path,
        check=True)

    subprocess.run([
        "sudo", "tar", "--sort=name", "--mtime=0", "--owner=0", "--group=0",
        "--numeric-owner",
        '--transform=s|usr/bin/ptxas|usr/local/cuda-11.8/bin/ptxas|',
        '--transform=s|usr/bin/fatbinary|usr/local/cuda-11.8/bin/aarch64-unknown-linux-gnu-fatbinary|',
        "--append", "-f", tarball, "usr/bin/fatbinary", "usr/bin/ptxas"
    ],
                   cwd=nvidia_cuda_toolkit_path,
                   check=True)

    subprocess.run(["sha256sum", tarball], check=True)


def main():
    check_required_deps(REQUIRED_DEPS)

    new_image = not os.path.exists(IMAGE)
    if new_image:
        make_image(IMAGE)

    with scoped_mount(IMAGE) as partition:
        if new_image:
            subprocess.run([
                "sudo", "debootstrap", "--arch=arm64", "--no-check-gpg",
                "--foreign", "bookworm", partition,
                "http://deb.debian.org/debian/"
            ],
                           check=True)

        subprocess.run([
            "sudo", "cp", "/usr/bin/qemu-aarch64-static",
            f"{partition}/usr/bin/"
        ],
                       check=True)
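        # Having qemu-aarch64-static inside the image is what lets target()
        # and pi_target() chroot into the arm64 rootfs from an x86-64 host
        # (together with the host's binfmt_misc setup for arm64 binaries
        # spawned inside the chroot).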

        global PARTITION
        PARTITION = partition

        if new_image:
            target(["/debootstrap/debootstrap", "--second-stage"])

            target([
                "useradd", "-m", "-p",
                '$y$j9T$85lzhdky63CTj.two7Zj20$pVY53UR0VebErMlm8peyrEjmxeiRw/rfXfx..9.xet1',
                '-s', '/bin/bash', 'pi'
            ])
            target(["addgroup", "debug"])
            target(["addgroup", "crypto"])
            target(["addgroup", "trusty"])

        if not os.path.exists(
                f"{partition}/etc/apt/sources.list.d/bullseye-backports.list"):
            copyfile("root:root", "644",
                     "etc/apt/sources.list.d/bullseye-backports.list")
            target(["apt-get", "update"])

        target([
            "apt-get", "-y", "install", "gnupg", "wget", "systemd",
            "systemd-resolved", "locales"
        ])

        target(["localedef", "-i", "en_US", "-f", "UTF-8", "en_US.UTF-8"])

        target_mkdir("root:root", "755", "run/systemd")
        target_mkdir("systemd-resolve:systemd-resolve", "755",
                     "run/systemd/resolve")
        copyfile("systemd-resolve:systemd-resolve", "644",
                 "run/systemd/resolve/stub-resolv.conf")
        target(["systemctl", "enable", "systemd-resolved"])

        target([
            "apt-get", "-y", "install", "bpfcc-tools", "sudo",
            "openssh-server", "python3", "bash-completion", "git", "v4l-utils",
            "cpufrequtils", "pmount", "rsync", "vim-nox", "chrony",
            "libopencv-calib3d406", "libopencv-contrib406",
            "libopencv-core406", "libopencv-features2d406",
            "libopencv-flann406", "libopencv-highgui406",
            "libopencv-imgcodecs406", "libopencv-imgproc406",
            "libopencv-ml406", "libopencv-objdetect406", "libopencv-photo406",
            "libopencv-shape406", "libopencv-stitching406",
            "libopencv-superres406", "libopencv-video406",
            "libopencv-videoio406", "libopencv-videostab406",
            "libopencv-viz406", "libopencv-dev", "libnice10", "pmount",
            "libnice-dev", "feh", "libgstreamer1.0-0",
            "libgstreamer-plugins-base1.0-0", "libgstreamer-plugins-bad1.0-0",
            "gstreamer1.0-plugins-base", "gstreamer1.0-plugins-good",
            "gstreamer1.0-plugins-bad", "gstreamer1.0-plugins-ugly",
            "gstreamer1.0-nice", "usbutils", "locales", "trace-cmd", "clinfo",
            "jq", "strace", "sysstat", "lm-sensors", "can-utils", "xfsprogs",
            "gstreamer1.0-tools", "bridge-utils", "net-tools", "apt-file",
            "parted", "xxd", "libv4l-dev", "file", "pkexec", "libxkbfile1",
            "gdb"
        ])
        target(["apt-get", "clean"])

        target(["usermod", "-a", "-G", "sudo", "pi"])
        target(["usermod", "-a", "-G", "video", "pi"])
        target(["usermod", "-a", "-G", "systemd-journal", "pi"])
        target(["usermod", "-a", "-G", "dialout", "pi"])

        virtual_packages = [
            'libglib-2.0-0', 'libglvnd', 'libgtk-3-0', 'libxcb-glx', 'wayland'
        ]

        install_virtual_packages(virtual_packages)

        yocto_package_names = [
            'tegra-argus-daemon',
            'tegra-firmware',
            'tegra-firmware-tegra234',
            'tegra-firmware-vic',
            'tegra-firmware-xusb',
            'tegra-libraries-argus-daemon-base',
            'tegra-libraries-camera',
            'tegra-libraries-core',
            'tegra-libraries-cuda',
            'tegra-libraries-eglcore',
            'tegra-libraries-glescore',
            'tegra-libraries-glxcore',
            'tegra-libraries-multimedia',
            'tegra-libraries-multimedia-utils',
            'tegra-libraries-multimedia-v4l',
            'tegra-libraries-nvsci',
            'tegra-libraries-vulkan',
            'tegra-nvphs',
            'tegra-nvphs-base',
            'libnvidia-egl-wayland1',
            'tegra-mmapi',
            'tegra-mmapi-dev',
            'cuda-cudart-11-8',
            'cuda-cudart-11-8-dev',
            'cuda-cudart-11-8-stubs',
            'libcurand-11-8',
            'libcurand-11-8-dev',
            'libcurand-11-8-stubs',
            'cuda-nvcc-11-8',
            'tegra-cmake-overrides',
            'cuda-target-environment',
            'libnpp-11-8',
            'libnpp-11-8-stubs',
            'libnpp-11-8-dev',
            'cuda-cccl-11-8',
            'cuda-nvcc-11-8',
            'cuda-nvcc-headers-11-8',
            'nsight-systems-cli',
            'nsight-systems-cli-qdstrmimporter',
            'tegra-tools-jetson-clocks',
        ]
        yocto_packages = list_yocto_packages()
        packages = list_packages()

        install_packages([
            package for package in yocto_packages
            if package.name in yocto_package_names
        ], packages)

        # Now, install the kernel and modules after all the normal packages
        # are in.
        yocto_packages_to_install = [
            package for package in yocto_packages
            if (package.name.startswith('kernel-module-') or package.name.
                startswith('kernel-5.10') or package.name == 'kernel-modules')
        ]

        packages_to_remove = []

        # Remove kernel-module-* packages + kernel- package.
        for key in packages:
            if key.startswith('kernel-module') or key.startswith(
                    'kernel-5.10'):
                already_installed = False
                for index, yocto_package in enumerate(
                        yocto_packages_to_install):
                    if key == yocto_package.name and packages[
                            key] == yocto_package.version:
                        already_installed = True
                        del yocto_packages_to_install[index]
                        break
                if not already_installed:
                    packages_to_remove.append(key)

        print("Removing", packages_to_remove)
        if len(packages_to_remove) > 0:
            target(['dpkg', '--purge'] + packages_to_remove)
        print("Installing",
              [package.name for package in yocto_packages_to_install])

        install_packages(yocto_packages_to_install, packages)

        target(["systemctl", "enable", "nvargus-daemon.service"])

        copyfile("root:root", "644", "etc/sysctl.d/sctp.conf")
        copyfile("root:root", "644", "etc/systemd/logind.conf")
        copyfile("root:root", "555",
                 "etc/bash_completion.d/aos_dump_autocomplete")
        copyfile("root:root", "644", "etc/security/limits.d/rt.conf")
        copyfile("root:root", "644", "etc/systemd/system/usb-mount@.service")
        copyfile("root:root", "644", "etc/chrony/chrony.conf")
        target_mkdir("root:root", "700", "root/bin")
        target_mkdir("pi:pi", "755", "home/pi/.ssh")
        copyfile("pi:pi", "600", "home/pi/.ssh/authorized_keys")
        target_mkdir("root:root", "700", "root/bin")
        copyfile("root:root", "644", "etc/systemd/system/grow-rootfs.service")
        copyfile("root:root", "644",
                 "etc/systemd/system/jetson-clocks.service")
        copyfile("root:root", "500", "root/bin/change_hostname.sh")
        copyfile("root:root", "700", "root/trace.sh")
        copyfile("root:root", "440", "etc/sudoers")
        copyfile("root:root", "644", "etc/fstab")
        copyfile("root:root", "644",
                 "var/nvidia/nvcam/settings/camera_overrides.isp")
        copyfile("root:root", "644", "etc/ld.so.conf.d/yocto.conf")

        target_mkdir("root:root", "755", "etc/systemd/network")
        copyfile("root:root", "644", "etc/systemd/network/eth0.network")
        copyfile("root:root", "644", "etc/systemd/network/80-cana.network")
        copyfile("root:root", "644", "etc/systemd/network/80-canb.network")
        copyfile("root:root", "644", "etc/systemd/network/80-canc.network")
        copyfile("root:root", "644", "etc/udev/rules.d/nvidia.rules")
        copyfile("root:root", "644", "etc/udev/rules.d/can.rules")
        target(["/root/bin/change_hostname.sh", "pi-971-1"])

        target(["systemctl", "enable", "systemd-networkd"])
        target(["systemctl", "enable", "grow-rootfs"])
        target(["systemctl", "enable", "jetson-clocks"])

        target(["apt-file", "update"])

        target(["ldconfig"])

        if not os.path.exists(f"{partition}/home/pi/.dotfiles"):
            pi_target_unescaped(
                "cd /home/pi/ && git clone --separate-git-dir=/home/pi/.dotfiles https://github.com/AustinSchuh/.dotfiles.git tmpdotfiles && rsync --recursive --verbose --exclude .git tmpdotfiles/ /home/pi/ && rm -r tmpdotfiles && git --git-dir=/home/pi/.dotfiles/ --work-tree=/home/pi/ config --local status.showUntrackedFiles no"
            )
            pi_target(["vim", "-c", "\":qa!\""])

            target_unescaped(
                "cd /root/ && git clone --separate-git-dir=/root/.dotfiles https://github.com/AustinSchuh/.dotfiles.git tmpdotfiles && rsync --recursive --verbose --exclude .git tmpdotfiles/ /root/ && rm -r tmpdotfiles && git --git-dir=/root/.dotfiles/ --work-tree=/root/ config --local status.showUntrackedFiles no"
            )
            target(["vim", "-c", "\":qa!\""])

        generate_build_file(partition)

        do_package(partition)


if __name__ == '__main__':
    main()