#!/usr/bin/python3
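"""Builds an arm64 Debian Bookworm rootfs image for the NVIDIA Orin.

This creates (or updates) a disk image, debootstraps Debian into it, layers in
kernel, firmware, and CUDA packages from a meta-frc971 Yocto build, generates a
Bazel sysroot BUILD file from the result, and packages the rootfs as a tarball.
"""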

from __future__ import annotations
import apt_pkg
import collections
import contextlib
import datetime
import functools
import jinja2
import os
import pathlib
import platform
import re
import shlex
import shutil
import subprocess

# Name of debian image to be created/modified
IMAGE = "arm64_bookworm_debian_yocto.img"

# Path to yocto build for the orin (using meta-frc971)
YOCTO = "/home/austin/local/jetpack/robot-yocto/build"

REQUIRED_DEPS = ["debootstrap", "u-boot-tools", "xfsprogs"]

apt_pkg.init_system()


@contextlib.contextmanager
def scoped_loopback(image):
    """Mounts an image as a loopback device."""
    result = subprocess.run(["sudo", "losetup", "--show", "-f", image],
                            check=True,
                            stdout=subprocess.PIPE)
    device = result.stdout.decode('utf-8').strip()
    print("Mounted", image, "to", repr(device))
    try:
        yield device
    finally:
        subprocess.run(["sudo", "losetup", "-d", device], check=True)


@contextlib.contextmanager
def scoped_mount(image):
    """Mounts an image as a partition."""
    partition = f"{image}.partition"
    try:
        os.mkdir(partition)
    except FileExistsError:
        pass

    result = subprocess.run(["sudo", "mount", "-o", "loop", image, partition],
                            check=True)

    try:
        yield partition
    finally:
        subprocess.run(
            ["sudo", "rm", f"{partition}/usr/bin/qemu-aarch64-static"])
        subprocess.run(["sudo", "umount", partition], check=True)


def check_buildifier():
    """Checks if buildifier is in the path"""
    result = subprocess.run(["which", "buildifier"], stdout=subprocess.PIPE)
    if result.stdout.decode('utf-8') == "":
        return False
    else:
        return True


def check_required_deps(deps):
    """Checks if the provided list of dependencies is installed."""
    missing_deps = []
    for dep in deps:
        result = subprocess.run(["dpkg-query", "-W", "-f='${Status}'", dep],
                                check=True,
                                stdout=subprocess.PIPE)

        if "install ok installed" not in result.stdout.decode('utf-8'):
            missing_deps.append(dep)

    if len(missing_deps) > 0:
        print("Missing dependencies, please install:")
        print("sudo apt-get install", " ".join(missing_deps))
        exit()


def make_image(image):
    """Makes an image and creates an xfs filesystem on it."""
    print("--> Creating NEW image ", f"{image}")
    result = subprocess.run([
        "dd", "if=/dev/zero", f"of={image}", "bs=1", "count=0",
        "seek=8589934592"
    ],
                            check=True)

    with scoped_loopback(image) as loopback:
        subprocess.run([
            "sudo", "mkfs.xfs", "-d", "su=128k", "-d", "sw=1", "-L", "rootfs",
            loopback
        ],
                       check=True)


def target_unescaped(cmd):
    """Runs a command as root with bash -c cmd, i.e., without escaping."""
    subprocess.run([
        "sudo", "chroot", "--userspec=0:0", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c", cmd
    ],
                   check=True)


def target(cmd):
    """Runs a command as root with escaping."""
    target_unescaped(shlex.join([shlex.quote(c) for c in cmd]))

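# For example, target(["apt-get", "update"]) runs "apt-get update" as root
# inside the mounted image via qemu-aarch64-static, using the global PARTITION
# set by main() or mount_and_bash().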

def pi_target_unescaped(cmd):
    """Runs a command as pi with bash -c cmd, i.e., without escaping."""
    subprocess.run([
        "sudo", "chroot", "--userspec=pi:pi", "--groups=pi", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c", cmd
    ],
                   check=True)


def pi_target(cmd):
    """Runs a command as pi with escaping."""
    pi_target_unescaped(shlex.join([shlex.quote(c) for c in cmd]))


def copyfile(owner, permissions, file):
    """Copies a file from contents/{file} with the provided owner and permissions."""
    print("copyfile", owner, permissions, file)
    subprocess.run(["sudo", "cp", f"contents/{file}", f"{PARTITION}/{file}"],
                   check=True)
    subprocess.run(["sudo", "chmod", permissions, f"{PARTITION}/{file}"],
                   check=True)
    target(["chown", owner, f"/{file}"])


def target_mkdir(owner_group, permissions, folder):
    """Creates a directory recursively with the provided permissions and ownership."""
    print("target_mkdir", owner_group, permissions, folder)
    owner, group = owner_group.split(':')
    target(
        ["install", "-d", "-m", permissions, "-o", owner, "-g", group, folder])


def list_packages():
    """Lists all installed packages.

    Returns:
      A dictionary with keys as packages, and values as versions.
    """
    result = subprocess.run([
        "sudo", "chroot", "--userspec=0:0", f"{PARTITION}",
        "qemu-aarch64-static", "/bin/bash", "-c",
        "dpkg-query -W -f='${Package} ${Version}\n'"
    ],
                            check=True,
                            stdout=subprocess.PIPE)

    r = {}
    for line in result.stdout.decode('utf-8').strip().split('\n'):
        package, version = line.split(' ')
        r[package] = version

    return r


def list_yocto_packages():
    """Lists all packages in the Yocto folder.

    Returns:
      list of Package classes.
    """
    Package = collections.namedtuple(
        'Package', ['path', 'name', 'version', 'architecture'])
    result = []
    pathlist = pathlib.Path(f"{YOCTO}/tmp/deploy/deb").glob('**/*.deb')
    for path in pathlist:
        # Strip off the path, .deb, and split on _ to parse the package info.
        s = os.path.basename(str(path))[:-4].split('_')
        result.append(Package(str(path), s[0], s[1], s[2]))

    return result


def install_packages(new_packages, existing_packages):
    """Installs the provided yocto packages, if they are new."""
    # To install the yocto packages, first copy them into a folder in /tmp,
    # then install them, then clean the folder up.
    target(["mkdir", "-p", "/tmp/yocto_packages"])
    try:
        to_install = []
        for package in new_packages:
            if package.name in existing_packages and existing_packages[
                    package.name] == package.version:
                print('Skipping', package)
                continue

            subprocess.run([
                "sudo", "cp", package.path,
                f"{PARTITION}/tmp/yocto_packages/{os.path.basename(package.path)}"
            ],
                           check=True)
            to_install.append(package)

        if len(to_install) > 0:
            target(["dpkg", "-i"] + [
                f"/tmp/yocto_packages/{os.path.basename(package.path)}"
                for package in to_install
            ])

    finally:
        target(["rm", "-rf", "/tmp/yocto_packages"])


def install_virtual_packages(virtual_packages):
    """Builds and installs the provided virtual packages."""
    try:
        target(["mkdir", "-p", "/tmp/yocto_packages"])
        for virtual_package in virtual_packages:
            subprocess.run(
                ["dpkg-deb", "--build", f"virtual_packages/{virtual_package}"],
                check=True)
            subprocess.run([
                "sudo", "cp", f"virtual_packages/{virtual_package}.deb",
                f"{PARTITION}/tmp/yocto_packages/{virtual_package}.deb"
            ],
                           check=True)

        target(["dpkg", "-i"] + [
            f"/tmp/yocto_packages/{package}.deb"
            for package in virtual_packages
        ])

        for virtual_package in virtual_packages:
            subprocess.run(["rm", f"virtual_packages/{virtual_package}.deb"],
                           check=True)

    finally:
        target(["rm", "-rf", "/tmp/yocto_packages"])


class NameVersion:
    """ Class representing a package name and optionally a version constraint. """

    def __init__(self, nameversion: str):
        # We are processing package names here like:
        #  python3:any
        #  python3-markdown (= 3.4.1-2)
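        # e.g. "python3-markdown (= 3.4.1-2)" parses into name
        # "python3-markdown", operator "=", version "3.4.1-2"; a bare name
        # like "python3:any" gets no version constraint (operator and version
        # stay None, and the ":any" architecture suffix is stripped below).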
        if '(' in nameversion:
            s = nameversion.split(' (')
            self.name = s[0].strip()

            v = s[1][:-1].split(' ')

            self.operator = v[0]
            self.version = v[1]
        else:
            self.name = nameversion.strip()
            self.operator = None
            self.version = None

        # Rip off :amd64 or :aarch64 from the name if it is here.
        if ':' in self.name:
            self.name = self.name.split(':')[0]

    def matches(self, other: NameVersion) -> bool:
        """If self meets the requirements defined by other."""
        if other.name != self.name:
            return False

        if other.operator is None:
            return True

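        # apt_pkg.version_compare returns a negative, zero, or positive value
        # when self.version sorts before, equal to, or after other.version
        # under Debian version ordering rules.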
        vc = apt_pkg.version_compare(self.version, other.version)
        if vc < 0:
            return other.operator in ('<=', '<<')
        elif vc == 0:
            return other.operator in ('=', '>=', '<=')
        elif vc > 0:
            return other.operator in ('>=', '>>')

    def __repr__(self) -> str:
        if self.operator is not None:
            return f"NameVersion({self.name} ({self.operator} {self.version}))"
        else:
            return f"NameVersion({self.name})"


class Package:

    def __init__(self, name: str, provides: str, version: str, depends: str,
                 files: list[str]):
        self.name = NameVersion(f"{name} (= {version})")

        self.provides = [self.name]

        if provides:
            for package_and_version in provides.split(","):
                self.provides.append(NameVersion(package_and_version))

        self.depends = []
        if depends:
            for package_and_version in depends.split(", "):
                if ' | ' in package_and_version:
                    oneof = []
                    for oneof_package_and_version in package_and_version.split(
                            ' | '):
                        oneof.append(NameVersion(oneof_package_and_version))
                    self.depends.append(oneof)
                else:
                    self.depends.append([NameVersion(package_and_version)])

        self.files = files

    def update_filetypes(self, directories: set[str],
                         symlinks: dict[str, str]):
        if hasattr(self, 'directories') or hasattr(self, 'symlinks'):
            return

        self.directories = []
        self.symlinks = dict()
        files = []
        for f in self.files:
            if f in directories:
                self.directories.append(f)
            elif f in symlinks:
                self.symlinks[f] = symlinks[f]
            else:
                files.append(f)

        self.files = files

    def matches(self, other: NameVersion) -> bool:
        """If self meets the requirements defined by other."""
        return any(p.matches(other) for p in self.provides)

    def resolved_depends(self, packages: dict[str, Package]) -> list[Package]:
        result = set()

        # The dependencies are lists of lists of dependencies. At least one
        # element from each inner list needs to match for it to be valid. Most
        # of the dependencies are going to be a single element list.
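        # e.g. a Depends field of "libfoo (>= 1.0), bar | baz" (a made-up
        # example) parses into [[libfoo (>= 1.0)], [bar, baz]]: libfoo must
        # resolve, and at least one of bar or baz must resolve.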
        for p_or_list in self.depends:
            resolved_set = set()
            for oneof_package in p_or_list:
                if oneof_package.name not in packages:
                    continue

                resolved_oneof_package = packages[oneof_package.name]
                if resolved_oneof_package.matches(oneof_package):
                    resolved_set.add(resolved_oneof_package)

            if len(resolved_set) == 0:
                raise RuntimeError(
                    f"Failed to find dependencies for {p_or_list}: {repr(self)}"
                )

            result.update(resolved_set)

        return sorted(list(result), key=lambda x: x.name.name)

    def headers(self) -> list[str]:
        return [h for h in self.files if h.startswith('/usr/include')]

    def objects(self) -> list[str]:
        result = []
        for file in self.files:
            if not file.startswith('/usr'):
                continue

            # Gotta love GDB extensions ...libc.so....py. Ignore them.
            if file.endswith('.py'):
                continue

            # We want to find things like libfoo.so.1.2.3.4.5. The .so needs to be last.
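            # e.g. "/usr/lib/libc.so.6" strips ".6" and then ".so", so it is
            # kept; "/usr/lib/libfoo.a" (a hypothetical static library) never
            # hits a ".so" extension and is skipped.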
            opath = file
            found_so = False
            while True:
                opath, ext = os.path.splitext(opath)
                if ext == '':
                    break
                elif ext == '.so':
                    found_so = True
                else:
                    found_so = False

            if found_so:
                result.append(file)

        return sorted(result)

    def __repr__(self) -> str:
        return f"{{ {repr(self.provides[0])}, \"provides\": {repr(self.provides[1:])}, \"depends\": {repr(self.depends)} }}"


class PkgConfig:

    def __init__(self, contents, package):
        # The pkgconfig file format lets you specify variables and then expand
        # them into the various fields. These are in the form
        #   asdf=15234
        self.variables = dict()

        self.package = package
        self.name = None
        self.libs = []
        self.cflags = []
        for line in contents.split('\n'):
            line = line.strip()
            # Parse everything so we learn if a new field shows up we don't
            # know how to parse.
            if line == '':
                pass
            elif line[0] == '#':
                pass
            elif line.startswith('Name:'):
                self.name = self.expand(line.removeprefix('Name:').strip())
            elif line.startswith('Description:'):
                self.description = self.expand(
                    line.removeprefix('Description:').strip())
            elif line.startswith('Version:'):
                self.version = self.expand(
                    line.removeprefix('Version:').strip())
            elif line.startswith('Libs:'):
                self.libs = self.expand(
                    line.removeprefix('Libs:').strip()).split()
            elif line.startswith('Cflags:'):
                self.cflags = self.expand(
                    line.removeprefix('Cflags:').strip()).split()
            elif line.startswith('URL:'):
                pass
            elif line.startswith('Cflags.private:'):
                pass
            elif line.startswith('Requires:'):
                pass
            elif line.startswith('Requires.private:'):
                pass
            elif line.startswith('Libs.private:'):
                pass
            elif line.startswith('Conflicts:'):
                pass
            elif re.match('^[-a-zA-Z_0-9]* *=.*$', line):
                split_line = re.split(' *= *', line)
                self.variables[split_line[0]] = self.expand(split_line[1])
            else:
                raise ValueError('Unknown line in pkgconfig file')

        if self.name is None:
            raise RuntimeError("Failed to find Name.")

    def expand(self, line: str) -> str:
        """ Expands a string with variable expansions in it like bash (${foo}). """
        for var in self.variables:
            line = line.replace('${' + var + '}', self.variables[var])
        return line


class Filesystem:

    def __init__(self, partition):
        self.partition = partition
        # TODO(austin): I really want to be able to run this on an amd64
        # filesystem too, which won't work with qemu-aarch64-static. Pull it
        # into a library.
        result = subprocess.run([
            "sudo", "chroot", "--userspec=0:0", f"{self.partition}",
            "qemu-aarch64-static", "/bin/bash", "-c",
            "dpkg-query -W -f='Version: ${Version}\nPackage: ${Package}\nProvides: ${Provides}\nDepends: ${Depends}\n${db-fsys:Files}--\n'"
        ],
                                check=True,
                                stdout=subprocess.PIPE)

        # Mapping from all package names (str) to their corresponding Package
        # objects for that package.
        self.packages = dict()

        package_in_progress = {'files': []}
        files = set()
        for line in result.stdout.decode('utf-8').strip().split('\n'):
            if line == '--':
                # We found the end-of-package delimiter, save the package and
                # clear everything out.
                new_package = Package(package_in_progress['Package'],
                                      package_in_progress['Provides'],
                                      package_in_progress['Version'],
                                      package_in_progress['Depends'],
                                      package_in_progress['files'])

                for provides in new_package.provides:
                    self.packages[provides.name] = new_package

                # Wipe everything so we detect if any fields are missing.
                package_in_progress = {'files': []}
            elif line.startswith("Version: "):
                package_in_progress['Version'] = line.removeprefix("Version: ")
            elif line.startswith("Package: "):
                package_in_progress['Package'] = line.removeprefix("Package: ")
            elif line.startswith("Provides: "):
                package_in_progress['Provides'] = line.removeprefix(
                    "Provides: ")
            elif line.startswith("Depends: "):
                package_in_progress['Depends'] = line.removeprefix("Depends: ")
            else:
                assert (line.startswith(' '))
                f = line.removeprefix(' ')
                package_in_progress['files'].append(f)
                files.add(f)

        self.directories = set()
        self.symlinks = dict()

        for root, walked_dirs, walked_files in os.walk(self.partition):
            for entry in walked_files + walked_dirs:
                full_target = os.path.join(root, entry)
                if pathlib.Path(full_target).is_symlink():
                    target = full_target.removeprefix(self.partition)
                    self.symlinks[target] = os.readlink(full_target)

        for file in files:
            full_target = f"{self.partition}/{file}"
            try:
                if pathlib.Path(full_target).is_symlink():
                    self.symlinks[file] = os.readlink(full_target)

                if pathlib.Path(full_target).is_dir():
                    self.directories.add(file)
            except PermissionError:
                # Assume it is a file...
                print("Failed to read", file)
                pass

            # Directories are all the things before the last /
            for parent in pathlib.Path(file).parents:
                self.directories.add(parent)

        # Now, populate self.files with a mapping from each file to the owning
        # package so we can do file ownership lookups.
        visited = set()
        self.files = dict()
        for package in self.packages.values():
            if package in visited:
                continue
            visited.add(package)

            for f in package.files:
                if f in self.directories:
                    continue

                if f in self.files:
                    print("Duplicate file", repr(f), ' current', package,
                          ' already', self.files[f])
                    if not f.startswith('/usr/share'):
                        assert (f not in self.files)
                self.files[f] = package

        # For each package, update the file list to track dependencies and
        # symlinks correctly.
        for p in self.packages.values():
            p.update_filetypes(self.directories, self.symlinks)

        # Print out all the libraries and where they live as known to ldconfig
        result = subprocess.run(
            [
                '/usr/sbin/ldconfig', '-C',
                f'{self.partition}/etc/ld.so.cache', '-p'
            ],
            check=True,
            stdout=subprocess.PIPE,
        )

        self.ldconfig_cache = dict()
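        # ldconfig -p output lines look roughly like (for an aarch64 sysroot):
        #   "\tlibc.so.6 (libc6,AArch64) => /lib/aarch64-linux-gnu/libc.so.6"
        # so split_line[0] is the soname and split_line[2] is the path.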
        for line in result.stdout.decode('utf-8').split('\n'):
            if line.startswith('\t'):
                split_line = re.split(' \\(libc6,(AArch64|x86-64)\\) => ',
                                      line.strip())
                self.ldconfig_cache[split_line[0]] = split_line[2]

        self.pkgcfg = dict()
        for pkgconfig in [
                '/usr/local/lib/aarch64-linux-gnu/pkgconfig',
                '/usr/local/lib/pkgconfig',
                '/usr/local/share/pkgconfig',
                '/usr/lib/aarch64-linux-gnu/pkgconfig',
                '/usr/lib/pkgconfig',
                '/usr/share/pkgconfig',
        ]:
            candidate_folder = f"{self.partition}/{pkgconfig}"
            if not os.path.exists(candidate_folder):
                continue

            for f in os.listdir(candidate_folder):
                full_filename = f"{candidate_folder}/{f}"
                if pathlib.Path(full_filename).is_dir():
                    continue
                if not f.endswith('.pc'):
                    continue

                package_name = f.removesuffix('.pc')

                with open(f"{candidate_folder}/{f}", "r") as file:
                    self.pkgcfg[package_name] = PkgConfig(
                        file.read(), self.files[f'{pkgconfig}/{f}'])

    def resolve_symlink(self, path: str) -> str:
        """ Implements symlink resolution using self.symlinks. """
        # Only need to support absolute links since we don't have a concept of cwd.

        # Implements the symlink algorithm in
        # https://android.googlesource.com/platform/bionic.git/+/android-4.0.1_r1/libc/bionic/realpath.c
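        # e.g. if self.symlinks maps "/lib" to "usr/lib" (a typical Debian
        # merged-usr link), "/lib/aarch64-linux-gnu/libm.so.6" resolves to
        # "/usr/lib/aarch64-linux-gnu/libm.so.6".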
        assert (path[0] == '/')

        left = path.split('/')[1:]

        if len(path) == 0:
            return path

        resolved = ['']

        while len(left) > 0:
            if left[0] == '.':
                left = left[1:]
            elif left[0] == '..':
                assert (len(resolved) >= 1)
                resolved = resolved[:-1]
                left = left[1:]
            else:
                resolved.append(left[0])
                merged = '/'.join(resolved)
                if merged in self.symlinks:
                    symlink = self.symlinks[merged]
                    # Absolute symlink, blow away the previously accumulated
                    # path.
                    if symlink[0] == '/':
                        resolved = ['']
                        left = symlink[1:].split('/') + left[1:]
                    else:
                        # Relative symlink, replace the symlink name in the
                        # path with the newly found target.
                        resolved = resolved[:-1]
                        left = symlink.split('/') + left[1:]
                else:
                    left = left[1:]

        return '/'.join(resolved)

    def exists(self, path: str) -> bool:
        if path in self.files or path in self.symlinks or path in self.directories:
            return True
        return False

    def resolve_object(self,
                       obj: str,
                       requesting_obj: str | None = None) -> str:
        if obj in self.ldconfig_cache:
            return self.resolve_symlink(self.ldconfig_cache[obj])
        elif requesting_obj is not None:
            to_search = os.path.join(os.path.split(requesting_obj)[0], obj)
            if self.exists(to_search):
                return self.resolve_symlink(to_search)

        raise FileNotFoundError(obj)

    @functools.cache
    def object_dependencies(self, obj: str) -> list[str]:
        result = subprocess.run(
            ['objdump', '-p', f'{self.partition}/{obj}'],
            check=True,
            stdout=subprocess.PIPE,
        )

        # Part of the example output. We only want NEEDED from the dynamic
        # section.
        #
        #  RELRO off    0x0000000000128af0 vaddr 0x0000000000128af0 paddr 0x0000000000128af0 align 2**0
        #        filesz 0x0000000000003510 memsz 0x0000000000003510 flags r--
        #
        # Dynamic Section:
        #   NEEDED               libtinfo.so.6
        #   NEEDED               libc.so.6
        #   INIT                 0x000000000002f000
        #   FINI                 0x00000000000efb94

        deps = []
        for line in result.stdout.decode('utf-8').split('\n'):
            if 'NEEDED' in line:
                deps.append(line.strip().split()[1])

        return deps


def generate_build_file(partition):
    filesystem = Filesystem(partition)

    packages_to_eval = [
        filesystem.packages['libopencv-dev'],
        filesystem.packages['libc6-dev'],
        filesystem.packages['libstdc++-12-dev'],
        filesystem.packages['libnpp-11-8-dev'],
    ]

    # Recursively walk the tree using dijkstra's algorithm to generate targets
    # for each set of headers.
    print('Walking tree for', [p.name.name for p in packages_to_eval])

    rules = []
    objs_to_eval = []

    # Set of packages already generated in case our graph hits a package
    # multiple times.
    packages_visited_set = set()
    while packages_to_eval:
        next_package = packages_to_eval.pop()
        if next_package in packages_visited_set:
            continue
        packages_visited_set.add(next_package)

        hdrs = next_package.headers()
        objects = next_package.objects()

        deps = []
        for p in next_package.resolved_depends(filesystem.packages):
            if p not in packages_visited_set:
                packages_to_eval.append(p)

            # These two form a circular dependency...
            # Don't add them since libc6 has no headers in it.
            if next_package.name.name == 'libgcc-s1' and p.name.name == 'libc6':
                continue

            deps.append(p.name.name)

        if objects:
            objs_to_eval += objects

        hdrs.sort()
        deps.sort()
        hdrs = [f' "{h[1:]}",\n' for h in hdrs]
        hdrs_files = ''.join(hdrs)
        deps_joined = ''.join([f' ":{d}-headers",\n' for d in deps])

        filegroup_srcs = ''.join(
            [f' "{f[1:]}",\n' for f in next_package.files] +
            [f' ":{d}-filegroup",\n' for d in deps])

        rules.append(
            f'filegroup(\n name = "{next_package.name.name}-filegroup",\n srcs = [\n{filegroup_srcs} ],\n)'
        )
        rules.append(
            f'cc_library(\n name = "{next_package.name.name}-headers",\n hdrs = [\n{hdrs_files} ],\n visibility = ["//visibility:public"],\n deps = [\n{deps_joined} ],\n)'
        )
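        # Once buildifier reformats the generated text, a headers rule looks
        # roughly like this (names here are illustrative, not taken from a
        # real run):
        #   cc_library(
        #       name = "libc6-dev-headers",
        #       hdrs = ["usr/include/stdio.h", ...],
        #       visibility = ["//visibility:public"],
        #       deps = [":libc-dev-bin-headers", ...],
        #   )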

    skip_set = set()
    # These two are linker scripts. Since they are soooo deep in the
    # hierarchy, let's not stress parsing them correctly.
    skip_set.add('/usr/lib/aarch64-linux-gnu/libc.so')
    skip_set.add('/usr/lib/gcc/aarch64-linux-gnu/12/libgcc_s.so')

    obj_set = set()
    obj_set.update(skip_set)

    while objs_to_eval:
        obj = objs_to_eval.pop()
        if obj in obj_set:
            continue
        obj_set.add(obj)

        deps = filesystem.object_dependencies(obj)
        resolved_deps = []
        for d in deps:
            resolved_obj = filesystem.resolve_object(d, requesting_obj=obj)
            resolved_deps.append(resolved_obj)
            if resolved_obj not in obj_set:
                objs_to_eval.append(resolved_obj)

        resolved_deps.sort()
        rule_name = obj[1:].replace('/', '_')
        rule_deps = ''.join([
            ' ":{}",\n'.format(d[1:].replace('/', '_'))
            for d in resolved_deps if d not in skip_set
        ])
        rules.append(
            f'cc_library(\n name = "{rule_name}",\n srcs = ["{obj[1:]}"],\n deps = [\n{rule_deps} ],\n)'
        )

    standard_includes = set()
    standard_includes.add('/usr/include')
    standard_includes.add('/usr/include/aarch64-linux-gnu')
    standard_includes.add('/usr/include/x86-64-linux-gnu')
    for pkg in filesystem.pkgcfg:
        try:
            contents = filesystem.pkgcfg[pkg]
            resolved_libraries = [
                filesystem.resolve_object('lib' + f.removeprefix('-l') + '.so')
                for f in contents.libs if f.startswith('-l')
            ]

            if contents.package not in packages_visited_set:
                continue

            includes = []
            for flag in contents.cflags:
                if flag.startswith('-I/') and flag.removeprefix(
                        '-I') not in standard_includes:
                    includes.append(flag.removeprefix('-I/'))

            rule_deps = ''.join(
                sorted([
                    ' ":' + l[1:].replace('/', '_') + '",\n'
                    for l in resolved_libraries
                ] + [f' ":{contents.package.name.name}-headers",\n']))
            includes.sort()
            if len(includes) > 0:
                includes_string = ' includes = ["' + '", "'.join(
                    includes) + '"],\n'
            else:
                includes_string = ''
            rules.append(
                f'cc_library(\n name = "{pkg}",\n{includes_string} visibility = ["//visibility:public"],\n deps = [\n{rule_deps} ],\n)'
            )
            # Look up which package this is from to include the headers
            # Depend on all the libraries
            # Parse -I -> includes
        except FileNotFoundError:
            print('Failed to instantiate package', repr(pkg))
            pass

    # Now, we want to figure out what the dependencies of opencv-dev are.
    # Generate the dependency tree starting from an initial list of packages.

    # Then, figure out how to link the .so's in. Sometimes, multiple libraries
    # exist per .deb, one target for all?

    with open("orin_debian_rootfs.BUILD.template", "r") as file:
        template = jinja2.Template(file.read())

    substitutions = {
        "SYSROOT_SRCS": """glob(
    include = [
        "include/**",
        "lib/**",
        "lib64/**",
        "usr/include/**",
        "usr/local/**",
        "usr/lib/**",
        "usr/lib64/**",
    ],
    exclude = [
        "usr/share/**",
    ],
)""",
        "RULES": '\n\n'.join(rules),
    }

    with open("../../compilers/orin_debian_rootfs.BUILD", "w") as file:
        file.write(template.render(substitutions))

    subprocess.run(['buildifier', "../../compilers/orin_debian_rootfs.BUILD"])


def do_package(partition):
    tarball = datetime.date.today().strftime(
        f"{os.getcwd()}/%Y-%m-%d-bookworm-arm64-nvidia-rootfs.tar")
    print(tarball)

    subprocess.run([
        "sudo",
        "tar",
        "--sort=name",
        "--mtime=0",
        "--owner=0",
        "--group=0",
        "--numeric-owner",
        "--exclude=./usr/share/ca-certificates",
        "--exclude=./home",
        "--exclude=./root",
        "--exclude=./usr/src",
        "--exclude=./usr/lib/mesa-diverted",
        "--exclude=./usr/bin/X11",
        "--exclude=./usr/lib/systemd/system/system-systemd*cryptsetup.slice",
        "--exclude=./dev",
        "--exclude=./usr/local/cuda-11.8/bin/fatbinary",
        "--exclude=./usr/local/cuda-11.8/bin/ptxas",
        "--exclude=./usr/local/cuda-11.8/include/thrust",
        "--exclude=./usr/local/cuda-11.8/include/nv",
        "--exclude=./usr/local/cuda-11.8/include/cuda",
        "--exclude=./usr/local/cuda-11.8/include/cub",
        "--exclude=./usr/include/cub",
        "--exclude=./usr/include/nv",
        "--exclude=./usr/include/thrust",
        "--exclude=./usr/include/cuda",
        "--exclude=./usr/share",
        "-cf",
        tarball,
        ".",
    ],
                   cwd=partition,
                   check=True)

    # Pack ptxas and fatbinary into the spots that clang expects them to be
    # in, to make compiling easy.
    nvidia_cuda_toolkit_path = 'nvidia-cuda-toolkit'
    if not os.path.exists(nvidia_cuda_toolkit_path):
        os.mkdir(nvidia_cuda_toolkit_path)

    subprocess.run(['apt-get', 'download', 'nvidia-cuda-toolkit'],
                   cwd=nvidia_cuda_toolkit_path,
                   check=True)

    subprocess.run(
        ['dpkg', '-x',
         os.listdir(nvidia_cuda_toolkit_path)[0], '.'],
        cwd=nvidia_cuda_toolkit_path,
        check=True)

    subprocess.run([
        "sudo", "tar", "--sort=name", "--mtime=0", "--owner=0", "--group=0",
        "--numeric-owner",
        '--transform=s|usr/bin/ptxas|usr/local/cuda-11.8/bin/ptxas|',
        '--transform=s|usr/bin/fatbinary|usr/local/cuda-11.8/bin/aarch64-unknown-linux-gnu-fatbinary|',
        "--append", "-f", tarball, "usr/bin/fatbinary", "usr/bin/ptxas"
    ],
                   cwd=nvidia_cuda_toolkit_path,
                   check=True)

    subprocess.run(["sha256sum", tarball], check=True)


def mount_and_bash():
    """Helper function to just mount the image and open a bash interface.

    To run from the CLI, call:
      python3 -c "from build_rootfs import *; mount_and_bash()"
    """
    with scoped_mount(IMAGE) as partition:
        subprocess.run([
            "sudo", "cp", "/usr/bin/qemu-aarch64-static",
            f"{partition}/usr/bin/"
        ],
                       check=True)

        global PARTITION
        PARTITION = partition
        target(["/bin/bash"])


def main():
    check_required_deps(REQUIRED_DEPS)

    if not os.path.exists(YOCTO):
        print("ERROR: Must have YOCTO directory properly specified to run")
        print("See https://github.com/frc971/meta-frc971/tree/main for info")
        exit()

    if not check_buildifier():
        print(
            "ERROR: Need to have buildifier in the path. Please resolve this."
        )
        exit()

    new_image = not os.path.exists(IMAGE)
    if new_image:
        make_image(IMAGE)

    with scoped_mount(IMAGE) as partition:
        if new_image:
            subprocess.run([
                "sudo", "debootstrap", "--arch=arm64", "--no-check-gpg",
                "--foreign", "bookworm", partition,
                "http://deb.debian.org/debian/"
            ],
                           check=True)

        subprocess.run([
            "sudo", "cp", "/usr/bin/qemu-aarch64-static",
            f"{partition}/usr/bin/"
        ],
                       check=True)

        global PARTITION
        PARTITION = partition

        if new_image:
            target(["/debootstrap/debootstrap", "--second-stage"])

            # Do this unescaped; otherwise, we get quotes in the password
            target_unescaped(
                "useradd -m -p \"\$y\$j9T\$85lzhdky63CTj.two7Zj20\$pVY53UR0VebErMlm8peyrEjmxeiRw/rfXfx..9.xet1\" -s /bin/bash pi"
            )
            target(["addgroup", "debug"])
            target(["addgroup", "crypto"])
            target(["addgroup", "trusty"])

        if not os.path.exists(
                f"{partition}/etc/apt/sources.list.d/bullseye-backports.list"):
            copyfile("root:root", "644",
                     "etc/apt/sources.list.d/bullseye-backports.list")
            target(["apt-get", "update"])

        # This is useful in recovering from a partial build, and shouldn't
        # break anything otherwise
        target(["apt", "--fix-broken", "install"])

        target([
            "apt-get", "-y", "install", "gnupg", "wget", "systemd",
            "systemd-resolved", "locales"
        ])

        target(["localedef", "-i", "en_US", "-f", "UTF-8", "en_US.UTF-8"])

        target_mkdir("root:root", "755", "run/systemd")
        target_mkdir("systemd-resolve:systemd-resolve", "755",
                     "run/systemd/resolve")

        # Need to use the machine's local resolv.conf when running this
        # We put a generic file in place at the end when we're done
        subprocess.run([
            "sudo", "cp", f"/etc/resolv.conf",
            f"{PARTITION}/run/systemd/resolve/stub-resolv.conf"
        ],
                       check=True)
        subprocess.run([
            "sudo", "chmod", "644",
            f"{PARTITION}/run/systemd/resolve/stub-resolv.conf"
        ],
                       check=True)
        target([
            "chown", "systemd-resolve:systemd-resolve",
            f"/run/systemd/resolve/stub-resolv.conf"
        ])
        target(["systemctl", "enable", "systemd-resolved"])

        target([
            "apt-get", "-y", "install", "bpfcc-tools", "sudo",
            "openssh-server", "python3", "bash-completion", "git",
            "cpufrequtils", "pmount", "rsync", "vim-nox", "chrony",
            "libnice10", "pmount", "libnice-dev", "feh", "usbutils", "locales",
            "trace-cmd", "clinfo", "jq", "strace", "sysstat", "lm-sensors",
            "can-utils", "xfsprogs", "bridge-utils", "net-tools", "apt-file",
            "parted", "xxd", "file", "pkexec", "libxkbfile1", "gdb", "autossh",
            "smartmontools", "nvme-cli", "libgtk-3.0"
        ])
        target(["apt-get", "clean"])

        target(["usermod", "-a", "-G", "sudo", "pi"])
        target(["usermod", "-a", "-G", "video", "pi"])
        target(["usermod", "-a", "-G", "systemd-journal", "pi"])
        target(["usermod", "-a", "-G", "dialout", "pi"])

        virtual_packages = [
            'libglib-2.0-0',
            'libglvnd',
            'libgtk-3-0',
            'libxcb-glx',
            'wayland',
            'libz1',
        ]

        install_virtual_packages(virtual_packages)

        yocto_packages = list_yocto_packages()
        packages = list_packages()

        # Install the kernel and modules after all the normal packages are in.
        yocto_packages_to_install = [
            package for package in yocto_packages
            if (package.name.startswith('kernel-module-') or package.name.
                startswith('kernel-5.10') or package.name == 'kernel-modules')
        ]

        packages_to_remove = []

        # Remove kernel-module-* packages + kernel- package.
        for key in packages:
            if key.startswith('kernel-module') or key.startswith(
                    'kernel-5.10'):
                already_installed = False
                for index, yocto_package in enumerate(
                        yocto_packages_to_install):
                    if key == yocto_package.name and packages[
                            key] == yocto_package.version:
                        already_installed = True
                        del yocto_packages_to_install[index]
                        break
                if not already_installed:
                    packages_to_remove.append(key)

        print("Removing", packages_to_remove)
        if len(packages_to_remove) > 0:
            target(['dpkg', '--purge'] + packages_to_remove)
        print("Installing",
              [package.name for package in yocto_packages_to_install])

        install_packages(yocto_packages_to_install, packages)

        yocto_package_names = [
            'tegra-argus-daemon',
            'tegra-firmware',
            'tegra-firmware-tegra234',
            'tegra-firmware-vic',
            'tegra-firmware-xusb',
            'tegra-libraries-argus-daemon-base',
            'tegra-libraries-camera',
            'tegra-libraries-core',
            'tegra-libraries-cuda',
            'tegra-libraries-eglcore',
            'tegra-libraries-glescore',
            'tegra-libraries-glxcore',
            'tegra-libraries-multimedia',
            'tegra-libraries-multimedia-utils',
            'tegra-libraries-multimedia-v4l',
            'tegra-libraries-nvsci',
            'tegra-libraries-vulkan',
            'tegra-nvphs',
            'tegra-nvphs-base',
            'libnvidia-egl-wayland1',
            'tegra-mmapi',
            'tegra-mmapi-dev',
            'cuda-cudart-11-8',
            'cuda-cudart-11-8-dev',
            'cuda-cudart-11-8-stubs',
            'libcurand-11-8',
            'libcurand-11-8-dev',
            'libcurand-11-8-stubs',
            'cuda-nvcc-11-8',
            'tegra-cmake-overrides',
            'cuda-target-environment',
            'libnpp-11-8',
            'libnpp-11-8-stubs',
            'libnpp-11-8-dev',
            'cuda-cccl-11-8',
            'cuda-nvcc-11-8',
            'cuda-nvcc-headers-11-8',
            'nsight-systems-cli',
            'nsight-systems-cli-qdstrmimporter',
            'tegra-tools-jetson-clocks',
            'gstreamer1.0',
            'gstreamer1.0-plugins-base',
            'libgstallocators-1.0-0',
            'liborc',
            'libgstvideo-1.0-0',
            'libnvdsbufferpool1.0.0',
            'gstreamer1.0-plugins-nvarguscamerasrc',
            'cuda-cudart',
            'gstreamer1.0-plugins-nvvidconv',
            'gstreamer1.0-plugins-tegra',
            'gstreamer1.0-plugins-nvcompositor',
            'gstreamer1.0-plugins-nvdrmvideosink',
            'gstreamer1.0-plugins-nveglgles',
            'gstreamer1.0-plugins-nvjpeg',
            'gstreamer1.0-plugins-nvtee',
            'gstreamer1.0-plugins-nvv4l2camerasrc',
            'gstreamer1.0-plugins-nvvideo4linux2',
            'gstreamer1.0-plugins-nvvideosinks',
            'gstreamer1.0-plugins-tegra-binaryonly',
            'libgstnvcustomhelper',
            'gstreamer1.0-plugins-bad-videoparsersbad',
            'libgstcodecparsers-1.0-0',
            'libgstriff-1.0-0',
            'liborc-0.4-0',
            'libgstaudio-1.0-0',
            'libgsttag-1.0-0',
            'gstreamer1.0-plugins-good-rtp',
            'libgstrtp-1.0-0',
            'gstreamer1.0-plugins-good-udp',
            # Yocto's libv4l doesn't work with gstreamer, and we don't
            # actually care hugely. opencv seems to work.
            'libv4l',
            'libv4l-dev',
            'libgstpbutils-1.0-0',
            'libgstnvdsseimeta1.0.0',
            'media-ctl',
            'libgstapp-1.0-0',
        ]

        install_packages([
            package for package in yocto_packages
            if package.name in yocto_package_names
        ], packages)

        install_virtual_packages([
            'libgstreamer1.0-0',
            "libgstreamer-plugins-base1.0-0",
        ])

        target([
            "apt-mark",
            "hold",
            "gstreamer1.0-plugins-base",
            "libgstreamer1.0-0",
            "liborc-0.4-0",
        ])

        # Opencv depends on gstreamer, but we want our gstreamer... So install
        # ours first, install the adapter packages, then install theirs.
        target([
            "apt-get",
            "-y",
            "install",
            "libopencv-calib3d406",
            "libopencv-contrib406",
            "libopencv-core406",
            "libopencv-features2d406",
            "libopencv-flann406",
            "libopencv-highgui406",
            "libopencv-imgcodecs406",
            "libopencv-imgproc406",
            "libopencv-ml406",
            "libopencv-objdetect406",
            "libopencv-photo406",
            "libopencv-shape406",
            "libopencv-stitching406",
            "libopencv-superres406",
            "libopencv-video406",
            "libopencv-videoio406",
            "libopencv-videostab406",
            "libopencv-viz406",
            "libopencv-dev",
        ])

        target(["systemctl", "enable", "nvargus-daemon.service"])

        copyfile("root:root", "644", "etc/sysctl.d/sctp.conf")
        copyfile("root:root", "644", "etc/systemd/logind.conf")
        copyfile("root:root", "555",
                 "etc/bash_completion.d/aos_dump_autocomplete")
        copyfile("root:root", "644", "etc/security/limits.d/rt.conf")
        copyfile("root:root", "644", "etc/systemd/system/usb-mount@.service")
        copyfile("root:root", "644", "etc/chrony/chrony.conf")
        target_mkdir("root:root", "700", "root/bin")
        target_mkdir("pi:pi", "755", "home/pi/.ssh")
        copyfile("pi:pi", "600", "home/pi/.ssh/authorized_keys")
        target_mkdir("root:root", "700", "root/bin")
        copyfile("root:root", "644", "etc/systemd/system/grow-rootfs.service")
        copyfile("root:root", "644", "etc/systemd/system/frc971.service")
        copyfile("root:root", "644", "etc/systemd/system/frc971chrt.service")
        copyfile("root:root", "644",
                 "etc/systemd/system/jetson-clocks.service")
        copyfile("root:root", "500", "root/bin/change_hostname.sh")
        copyfile("root:root", "500", "root/bin/chrt.sh")
        copyfile("root:root", "700", "root/trace.sh")
        copyfile("root:root", "440", "etc/sudoers")
        copyfile("root:root", "644", "etc/fstab")
        copyfile("root:root", "644",
                 "var/nvidia/nvcam/settings/camera_overrides.isp")
        copyfile("root:root", "644", "/etc/ld.so.conf.d/yocto.conf")

        target_mkdir("root:root", "755", "etc/systemd/network")
        copyfile("root:root", "644", "etc/systemd/network/eth0.network")
        copyfile("root:root", "644", "etc/systemd/network/80-cana.network")
        copyfile("root:root", "644", "etc/systemd/network/80-canb.network")
        copyfile("root:root", "644", "etc/systemd/network/80-canc.network")
        copyfile("root:root", "644", "etc/udev/rules.d/nvidia.rules")
        copyfile("root:root", "644", "etc/udev/rules.d/can.rules")
        target(["/root/bin/change_hostname.sh", "orin-971-1"])

        target(["systemctl", "enable", "systemd-networkd"])
        target(["systemctl", "enable", "grow-rootfs"])
        target(["systemctl", "enable", "jetson-clocks"])
        target(["systemctl", "enable", "frc971"])
        target(["systemctl", "enable", "frc971chrt"])

        # Set up HW clock to use /dev/rtc0 and install hwclock service
        target(["ln", "-sf", "/dev/rtc0", "/dev/rtc"])
        target_unescaped(
            "sed -i s/ATTR{hctosys}==\\\"1\\\"/ATTR{hctosys}==\\\"0\\\"/ /lib/udev/rules.d/50-udev-default.rules"
        )
        copyfile("root:root", "644", "etc/systemd/system/hwclock.service")
        target(["systemctl", "enable", "hwclock"])

        target(["apt-file", "update"])

        target(["ldconfig"])

        if not os.path.exists(f"{partition}/home/pi/.dotfiles"):
            pi_target_unescaped(
                "cd /home/pi/ && git clone --separate-git-dir=/home/pi/.dotfiles https://github.com/AustinSchuh/.dotfiles.git tmpdotfiles && rsync --recursive --verbose --exclude .git tmpdotfiles/ /home/pi/ && rm -r tmpdotfiles && git --git-dir=/home/pi/.dotfiles/ --work-tree=/home/pi/ config --local status.showUntrackedFiles no"
            )
            pi_target(["vim", "-c", "\":qa!\""])

            target_unescaped(
                "cd /root/ && git clone --separate-git-dir=/root/.dotfiles https://github.com/AustinSchuh/.dotfiles.git tmpdotfiles && rsync --recursive --verbose --exclude .git tmpdotfiles/ /root/ && rm -r tmpdotfiles && git --git-dir=/root/.dotfiles/ --work-tree=/root/ config --local status.showUntrackedFiles no"
            )
            target(["vim", "-c", "\":qa!\""])

        # Do this after all the network needs are finished, since it won't
        # allow us to find URLs from the build server (frc971)
        target(["systemctl", "disable", "systemd-resolved"])
        copyfile("systemd-resolve:systemd-resolve", "644",
                 "run/systemd/resolve/stub-resolv.conf")
        target(["systemctl", "enable", "systemd-resolved"])

        generate_build_file(partition)

        do_package(partition)


if __name__ == '__main__':
    main()