From 1bb228fcfd057eb6a493a1275ce374821fb2a652 Mon Sep 17 00:00:00 2001
From: Oliver Smith
Date: Tue, 16 Jan 2024 22:33:06 +0000
Subject: [PATCH] README: point to new source location

---
 .b4-config | 9 -
 .build.yml | 26 -
 .ci/codespell.sh | 20 -
 .ci/note.sh | 6 -
 .ci/pytest.sh | 72 -
 .ci/ruff.sh | 19 -
 .ci/shellcheck.sh | 15 -
 .ci/vermin.sh | 25 -
 .gitignore | 120 --
 CONTRIBUTING.md | 37 -
 LICENSE | 674 ---------
 MANIFEST.in | 2 -
 README.md | 287 +---
 helpers/envkernel.fish | 56 -
 helpers/envkernel.sh | 426 ------
 helpers/envsetup.sh | 11 -
 pmb/__init__.py | 104 --
 pmb/aportgen/__init__.py | 82 --
 pmb/aportgen/busybox_static.py | 73 -
 pmb/aportgen/core.py | 221 ---
 pmb/aportgen/device.py | 331 -----
 pmb/aportgen/gcc.py | 92 --
 pmb/aportgen/grub_efi.py | 63 -
 pmb/aportgen/linux.py | 129 --
 pmb/aportgen/musl.py | 99 --
 pmb/build/__init__.py | 9 -
 pmb/build/_package.py | 518 -------
 pmb/build/autodetect.py | 95 --
 pmb/build/checksum.py | 34 -
 pmb/build/envkernel.py | 236 ---
 pmb/build/init.py | 118 --
 pmb/build/kconfig.py | 164 ---
 pmb/build/newapkbuild.py | 50 -
 pmb/build/other.py | 179 ---
 pmb/chroot/__init__.py | 9 -
 pmb/chroot/apk.py | 266 ----
 pmb/chroot/apk_static.py | 176 ---
 pmb/chroot/binfmt.py | 64 -
 pmb/chroot/init.py | 129 --
 pmb/chroot/initfs.py | 122 --
 pmb/chroot/initfs_hooks.py | 60 -
 pmb/chroot/mount.py | 123 --
 pmb/chroot/other.py | 75 -
 pmb/chroot/root.py | 81 --
 pmb/chroot/shutdown.py | 104 --
 pmb/chroot/user.py | 40 -
 pmb/chroot/zap.py | 171 ---
 pmb/ci/__init__.py | 169 ---
 pmb/config/__init__.py | 1156 ---------------
 pmb/config/init.py | 768 ----------
 pmb/config/load.py | 34 -
 pmb/config/merge_with_args.py | 37 -
 pmb/config/pmaports.py | 208 ---
 pmb/config/save.py | 11 -
 pmb/config/sudo.py | 36 -
 pmb/config/workdir.py | 115 --
 pmb/data/keys/README | 9 -
 ...vel@lists.alpinelinux.org-4a6a0840.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-5243ef4b.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-524d27bb.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-5261cecb.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-58199dcc.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-58cbb476.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-58e4f17d.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-5e69ca50.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-60ac2099.rsa.pub | 9 -
 ...vel@lists.alpinelinux.org-6165ee59.rsa.pub | 14 -
 ...vel@lists.alpinelinux.org-61666e3f.rsa.pub | 14 -
 ...vel@lists.alpinelinux.org-616a9724.rsa.pub | 14 -
 ...vel@lists.alpinelinux.org-616abc23.rsa.pub | 14 -
 ...vel@lists.alpinelinux.org-616ac3bc.rsa.pub | 14 -
 ...vel@lists.alpinelinux.org-616adfeb.rsa.pub | 14 -
 ...vel@lists.alpinelinux.org-616ae350.rsa.pub | 14 -
 ...vel@lists.alpinelinux.org-616db30d.rsa.pub | 14 -
 pmb/data/keys/build.postmarketos.org.rsa.pub | 14 -
 pmb/data/locales | 304 ----
 pmb/data/qemu-user-binfmt.txt | 61 -
 pmb/export/__init__.py | 5 -
 pmb/export/frontend.py | 33 -
 pmb/export/odin.py | 108 --
 pmb/export/symlinks.py | 76 -
 pmb/flasher/__init__.py | 8 -
 pmb/flasher/frontend.py | 171 ---
 pmb/flasher/init.py | 54 -
 pmb/flasher/run.py | 80 --
 pmb/flasher/variables.py | 103 --
 pmb/helpers/__init__.py | 2 -
 pmb/helpers/apk.py | 134 --
 pmb/helpers/aportupgrade.py | 291 ----
 pmb/helpers/args.py | 144 --
 pmb/helpers/cli.py | 146 --
 pmb/helpers/devices.py | 73 -
 pmb/helpers/file.py | 103 --
 pmb/helpers/frontend.py | 653 ---------
 pmb/helpers/git.py | 274 ----
 pmb/helpers/http.py | 94 --
 pmb/helpers/lint.py | 47 -
 pmb/helpers/logging.py | 129 --
 pmb/helpers/mount.py | 108 --
 pmb/helpers/other.py | 315 ----
 pmb/helpers/package.py | 181 ---
pmb/helpers/pkgrel_bump.py | 130 -- pmb/helpers/pmaports.py | 294 ---- pmb/helpers/repo.py | 219 --- pmb/helpers/repo_missing.py | 135 -- pmb/helpers/run.py | 48 - pmb/helpers/run_core.py | 393 ----- pmb/helpers/status.py | 163 --- pmb/helpers/ui.py | 23 - pmb/install/__init__.py | 9 - pmb/install/_install.py | 1277 ----------------- pmb/install/blockdevice.py | 145 -- pmb/install/format.py | 150 -- pmb/install/losetup.py | 81 -- pmb/install/partition.py | 192 --- pmb/install/recovery.py | 72 - pmb/install/ui.py | 36 - pmb/netboot/__init__.py | 68 - pmb/parse/__init__.py | 11 - pmb/parse/_apkbuild.py | 429 ------ pmb/parse/apkindex.py | 396 ----- pmb/parse/arch.py | 124 -- pmb/parse/arguments.py | 926 ------------ pmb/parse/binfmt_info.py | 33 - pmb/parse/bootimg.py | 141 -- pmb/parse/cpuinfo.py | 32 - pmb/parse/depends.py | 186 --- pmb/parse/deviceinfo.py | 156 -- pmb/parse/kconfig.py | 335 ----- pmb/parse/version.py | 309 ---- pmb/qemu/__init__.py | 3 - pmb/qemu/run.py | 382 ----- pmb/sideload/__init__.py | 93 -- pmbootstrap.py | 12 - pyproject.toml | 5 - setup.cfg | 2 - setup.py | 75 - test/pmb_test/__init__.py | 8 - test/pmb_test/const.py | 6 - test/pmb_test/git.py | 65 - test/pytest.ini | 4 - test/test_apk.py | 160 --- test/test_apk_static.py | 131 -- test/test_aportgen.py | 131 -- test/test_aportgen_device_wizard.py | 182 --- test/test_arguments.py | 27 - test/test_bootimg.py | 161 --- test/test_build_is_necessary.py | 89 -- test/test_build_package.py | 483 ------- test/test_chroot_interactive_shell.py | 52 - test/test_chroot_mount.py | 40 - test/test_config_init.py | 20 - test/test_config_pmaports.py | 73 - test/test_config_user.py | 68 - test/test_config_workdir.py | 151 -- test/test_envkernel.py | 119 -- test/test_file.py | 36 - test/test_folder_size.py | 37 - test/test_frontend.py | 19 - test/test_helpers_git.py | 194 --- test/test_helpers_lint.py | 43 - test/test_helpers_package.py | 137 -- test/test_helpers_pmaports.py | 52 - test/test_helpers_repo.py | 73 - test/test_helpers_repo_missing.py | 138 -- test/test_helpers_status.py | 92 -- test/test_helpers_ui.py | 35 - test/test_install.py | 178 --- test/test_mount.py | 25 - test/test_newapkbuild.py | 73 - test/test_parse_apkbuild.py | 163 --- test/test_parse_apkindex.py | 395 ----- test/test_parse_depends.py | 167 --- test/test_parse_deviceinfo.py | 38 - test/test_parse_kconfig.py | 465 ------ test/test_pkgrel_bump.py | 171 --- test/test_qemu_running_processes.py | 191 --- test/test_questions.py | 305 ---- test/test_run_core.py | 175 --- test/test_shell_escape.py | 115 -- test/test_version.py | 70 - test/test_version_validate.py | 16 - test/test_zzz_keys.py | 69 - .../apkbuild/APKBUILD.depends-in-depends | 8 - test/testdata/apkbuild/APKBUILD.lint | 33 - .../apkbuild/APKBUILD.linux-envkernel-test | 20 - .../APKBUILD.missing-pkgdesc-in-subpackage | 10 - test/testdata/apkbuild/APKBUILD.subpackages | 13 - .../apkbuild/APKBUILD.variable-replacements | 12 - test/testdata/apkbuild/APKBUILD.weird-pkgver | 8 - test/testdata/apkindex/conflict | 20 - test/testdata/apkindex/key_missing | 23 - test/testdata/apkindex/key_twice | 25 - test/testdata/apkindex/new_line_missing | 23 - test/testdata/apkindex/no_error | 45 - test/testdata/apkindex/virtual_package | 31 - .../aportgen/aports/main/binutils/APKBUILD | 143 -- .../aportgen/aports/main/gcc/APKBUILD | 792 ---------- .../pmaports/cross/gcc-armhf/APKBUILD | 810 ----------- .../aportgen/pmaports/temp/binutils/APKBUILD | 143 -- test/testdata/bootimg/boot-header-v2.img | Bin 4096 -> 0 bytes 
test/testdata/bootimg/boot-header-v3.img | Bin 4096 -> 0 bytes test/testdata/bootimg/dtb-second-boot.img | Bin 4096 -> 0 bytes test/testdata/bootimg/kernel-boot.img | Bin 1024 -> 0 bytes .../testdata/bootimg/mtk-boot-kernel-only.img | Bin 6209536 -> 0 bytes test/testdata/bootimg/mtk-boot-recovery.img | Bin 9803776 -> 0 bytes test/testdata/bootimg/mtk-boot.img | Bin 6211584 -> 0 bytes test/testdata/bootimg/normal-boot.img | Bin 2048 -> 0 bytes test/testdata/bootimg/qcdt-boot.img | Bin 4096 -> 0 bytes test/testdata/build_local_src/APKBUILD | 31 - test/testdata/channels.cfg | 22 - .../testing/device-multiple-kernels/APKBUILD | 29 - .../device-multiple-kernels/deviceinfo | 7 - .../postmarketos-ui-plasma-mobile/APKBUILD | 11 - .../testdata/helpers_ui/pmaports/pmaports.cfg | 1 - .../device/testing/device-lg-mako/APKBUILD | 16 - .../device/testing/device-lg-mako/deviceinfo | 27 - .../APKBUILD | 33 - .../testing/device-nonfree-firmware/APKBUILD | 28 - .../testing/device-nonfree-userland/APKBUILD | 20 - .../device/testing/device-sony-amami/APKBUILD | 45 - .../device-wileyfox-crackling/APKBUILD | 60 - .../main/postmarketos-ui-weston/APKBUILD | 16 - .../pkgrel_bump/aports/testapp/APKBUILD | 29 - .../pkgrel_bump/aports/testapp/testapp.c | 7 - .../pkgrel_bump/aports/testlib/APKBUILD | 38 - .../pkgrel_bump/aports/testlib/testlib.c | 5 - .../pkgrel_bump/aports/testlib/testlib.h | 3 - .../pkgrel_bump/aports/testsubpkg/APKBUILD | 33 - .../pkgrel_bump/aports/testsubpkg/testapp.c | 7 - test/testdata/pmaports.cfg | 5 - .../main/postmarketos-ui-test/APKBUILD | 13 - test/testdata/pmb_install/big.bin | Bin 7000 -> 0 bytes test/testdata/pmb_install/binary2.bin | Bin 100 -> 0 bytes test/testdata/pmb_install/full.bin | Bin 5120 -> 0 bytes test/testdata/pmb_install/overrun.bin | Bin 15000 -> 0 bytes test/testdata/pmb_install/small.bin | Bin 600 -> 0 bytes .../main/postmarketos-ui-test/APKBUILD | 13 - .../pmb_recommends/main/test-app/APKBUILD | 8 - test/testdata/version/README | 2 - test/testdata/version/version.data | 728 ---------- 241 files changed, 4 insertions(+), 28188 deletions(-) delete mode 100644 .b4-config delete mode 100644 .build.yml delete mode 100755 .ci/codespell.sh delete mode 100755 .ci/note.sh delete mode 100755 .ci/pytest.sh delete mode 100755 .ci/ruff.sh delete mode 100755 .ci/shellcheck.sh delete mode 100755 .ci/vermin.sh delete mode 100644 .gitignore delete mode 100644 CONTRIBUTING.md delete mode 100644 LICENSE delete mode 100644 MANIFEST.in delete mode 100644 helpers/envkernel.fish delete mode 100644 helpers/envkernel.sh delete mode 100644 helpers/envsetup.sh delete mode 100644 pmb/__init__.py delete mode 100644 pmb/aportgen/__init__.py delete mode 100644 pmb/aportgen/busybox_static.py delete mode 100644 pmb/aportgen/core.py delete mode 100644 pmb/aportgen/device.py delete mode 100644 pmb/aportgen/gcc.py delete mode 100644 pmb/aportgen/grub_efi.py delete mode 100644 pmb/aportgen/linux.py delete mode 100644 pmb/aportgen/musl.py delete mode 100644 pmb/build/__init__.py delete mode 100644 pmb/build/_package.py delete mode 100644 pmb/build/autodetect.py delete mode 100644 pmb/build/checksum.py delete mode 100644 pmb/build/envkernel.py delete mode 100644 pmb/build/init.py delete mode 100644 pmb/build/kconfig.py delete mode 100644 pmb/build/newapkbuild.py delete mode 100644 pmb/build/other.py delete mode 100644 pmb/chroot/__init__.py delete mode 100644 pmb/chroot/apk.py delete mode 100644 pmb/chroot/apk_static.py delete mode 100644 pmb/chroot/binfmt.py delete mode 100644 pmb/chroot/init.py delete 
mode 100644 pmb/chroot/initfs.py delete mode 100644 pmb/chroot/initfs_hooks.py delete mode 100644 pmb/chroot/mount.py delete mode 100644 pmb/chroot/other.py delete mode 100644 pmb/chroot/root.py delete mode 100644 pmb/chroot/shutdown.py delete mode 100644 pmb/chroot/user.py delete mode 100644 pmb/chroot/zap.py delete mode 100644 pmb/ci/__init__.py delete mode 100644 pmb/config/__init__.py delete mode 100644 pmb/config/init.py delete mode 100644 pmb/config/load.py delete mode 100644 pmb/config/merge_with_args.py delete mode 100644 pmb/config/pmaports.py delete mode 100644 pmb/config/save.py delete mode 100644 pmb/config/sudo.py delete mode 100644 pmb/config/workdir.py delete mode 100644 pmb/data/keys/README delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-4a6a0840.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-5243ef4b.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-524d27bb.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-5261cecb.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-58199dcc.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-58cbb476.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-58e4f17d.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-5e69ca50.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-60ac2099.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-6165ee59.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-61666e3f.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-616a9724.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-616abc23.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-616ac3bc.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-616adfeb.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-616ae350.rsa.pub delete mode 100644 pmb/data/keys/alpine-devel@lists.alpinelinux.org-616db30d.rsa.pub delete mode 100644 pmb/data/keys/build.postmarketos.org.rsa.pub delete mode 100644 pmb/data/locales delete mode 100644 pmb/data/qemu-user-binfmt.txt delete mode 100644 pmb/export/__init__.py delete mode 100644 pmb/export/frontend.py delete mode 100644 pmb/export/odin.py delete mode 100644 pmb/export/symlinks.py delete mode 100644 pmb/flasher/__init__.py delete mode 100644 pmb/flasher/frontend.py delete mode 100644 pmb/flasher/init.py delete mode 100644 pmb/flasher/run.py delete mode 100644 pmb/flasher/variables.py delete mode 100644 pmb/helpers/__init__.py delete mode 100644 pmb/helpers/apk.py delete mode 100644 pmb/helpers/aportupgrade.py delete mode 100644 pmb/helpers/args.py delete mode 100644 pmb/helpers/cli.py delete mode 100644 pmb/helpers/devices.py delete mode 100644 pmb/helpers/file.py delete mode 100644 pmb/helpers/frontend.py delete mode 100644 pmb/helpers/git.py delete mode 100644 pmb/helpers/http.py delete mode 100644 pmb/helpers/lint.py delete mode 100644 pmb/helpers/logging.py delete mode 100644 pmb/helpers/mount.py delete mode 100644 pmb/helpers/other.py delete mode 100644 pmb/helpers/package.py delete mode 100644 pmb/helpers/pkgrel_bump.py delete mode 100644 pmb/helpers/pmaports.py delete mode 100644 pmb/helpers/repo.py delete mode 100644 pmb/helpers/repo_missing.py delete mode 100644 pmb/helpers/run.py delete mode 100644 pmb/helpers/run_core.py delete 
mode 100644 pmb/helpers/status.py delete mode 100644 pmb/helpers/ui.py delete mode 100644 pmb/install/__init__.py delete mode 100644 pmb/install/_install.py delete mode 100644 pmb/install/blockdevice.py delete mode 100644 pmb/install/format.py delete mode 100644 pmb/install/losetup.py delete mode 100644 pmb/install/partition.py delete mode 100644 pmb/install/recovery.py delete mode 100644 pmb/install/ui.py delete mode 100644 pmb/netboot/__init__.py delete mode 100644 pmb/parse/__init__.py delete mode 100644 pmb/parse/_apkbuild.py delete mode 100644 pmb/parse/apkindex.py delete mode 100644 pmb/parse/arch.py delete mode 100644 pmb/parse/arguments.py delete mode 100644 pmb/parse/binfmt_info.py delete mode 100644 pmb/parse/bootimg.py delete mode 100644 pmb/parse/cpuinfo.py delete mode 100644 pmb/parse/depends.py delete mode 100644 pmb/parse/deviceinfo.py delete mode 100644 pmb/parse/kconfig.py delete mode 100644 pmb/parse/version.py delete mode 100644 pmb/qemu/__init__.py delete mode 100644 pmb/qemu/run.py delete mode 100644 pmb/sideload/__init__.py delete mode 100755 pmbootstrap.py delete mode 100644 pyproject.toml delete mode 100644 setup.cfg delete mode 100755 setup.py delete mode 100644 test/pmb_test/__init__.py delete mode 100644 test/pmb_test/const.py delete mode 100644 test/pmb_test/git.py delete mode 100644 test/pytest.ini delete mode 100644 test/test_apk.py delete mode 100644 test/test_apk_static.py delete mode 100644 test/test_aportgen.py delete mode 100644 test/test_aportgen_device_wizard.py delete mode 100644 test/test_arguments.py delete mode 100644 test/test_bootimg.py delete mode 100644 test/test_build_is_necessary.py delete mode 100644 test/test_build_package.py delete mode 100644 test/test_chroot_interactive_shell.py delete mode 100644 test/test_chroot_mount.py delete mode 100644 test/test_config_init.py delete mode 100644 test/test_config_pmaports.py delete mode 100644 test/test_config_user.py delete mode 100644 test/test_config_workdir.py delete mode 100644 test/test_envkernel.py delete mode 100644 test/test_file.py delete mode 100644 test/test_folder_size.py delete mode 100644 test/test_frontend.py delete mode 100644 test/test_helpers_git.py delete mode 100644 test/test_helpers_lint.py delete mode 100644 test/test_helpers_package.py delete mode 100644 test/test_helpers_pmaports.py delete mode 100644 test/test_helpers_repo.py delete mode 100644 test/test_helpers_repo_missing.py delete mode 100644 test/test_helpers_status.py delete mode 100644 test/test_helpers_ui.py delete mode 100644 test/test_install.py delete mode 100644 test/test_mount.py delete mode 100644 test/test_newapkbuild.py delete mode 100644 test/test_parse_apkbuild.py delete mode 100644 test/test_parse_apkindex.py delete mode 100644 test/test_parse_depends.py delete mode 100644 test/test_parse_deviceinfo.py delete mode 100644 test/test_parse_kconfig.py delete mode 100644 test/test_pkgrel_bump.py delete mode 100644 test/test_qemu_running_processes.py delete mode 100644 test/test_questions.py delete mode 100644 test/test_run_core.py delete mode 100644 test/test_shell_escape.py delete mode 100644 test/test_version.py delete mode 100644 test/test_version_validate.py delete mode 100644 test/test_zzz_keys.py delete mode 100644 test/testdata/apkbuild/APKBUILD.depends-in-depends delete mode 100644 test/testdata/apkbuild/APKBUILD.lint delete mode 100644 test/testdata/apkbuild/APKBUILD.linux-envkernel-test delete mode 100644 test/testdata/apkbuild/APKBUILD.missing-pkgdesc-in-subpackage delete mode 100644 
test/testdata/apkbuild/APKBUILD.subpackages delete mode 100644 test/testdata/apkbuild/APKBUILD.variable-replacements delete mode 100644 test/testdata/apkbuild/APKBUILD.weird-pkgver delete mode 100644 test/testdata/apkindex/conflict delete mode 100644 test/testdata/apkindex/key_missing delete mode 100644 test/testdata/apkindex/key_twice delete mode 100644 test/testdata/apkindex/new_line_missing delete mode 100644 test/testdata/apkindex/no_error delete mode 100644 test/testdata/apkindex/virtual_package delete mode 100644 test/testdata/aportgen/aports/main/binutils/APKBUILD delete mode 100644 test/testdata/aportgen/aports/main/gcc/APKBUILD delete mode 100644 test/testdata/aportgen/pmaports/cross/gcc-armhf/APKBUILD delete mode 100644 test/testdata/aportgen/pmaports/temp/binutils/APKBUILD delete mode 100644 test/testdata/bootimg/boot-header-v2.img delete mode 100644 test/testdata/bootimg/boot-header-v3.img delete mode 100644 test/testdata/bootimg/dtb-second-boot.img delete mode 100644 test/testdata/bootimg/kernel-boot.img delete mode 100644 test/testdata/bootimg/mtk-boot-kernel-only.img delete mode 100644 test/testdata/bootimg/mtk-boot-recovery.img delete mode 100644 test/testdata/bootimg/mtk-boot.img delete mode 100644 test/testdata/bootimg/normal-boot.img delete mode 100644 test/testdata/bootimg/qcdt-boot.img delete mode 100644 test/testdata/build_local_src/APKBUILD delete mode 100644 test/testdata/channels.cfg delete mode 100644 test/testdata/deviceinfo/aports/device/testing/device-multiple-kernels/APKBUILD delete mode 100644 test/testdata/deviceinfo/aports/device/testing/device-multiple-kernels/deviceinfo delete mode 100644 test/testdata/helpers_ui/pmaports/main/postmarketos-ui-plasma-mobile/APKBUILD delete mode 120000 test/testdata/helpers_ui/pmaports/pmaports.cfg delete mode 100644 test/testdata/init_questions_device/aports/device/testing/device-lg-mako/APKBUILD delete mode 100644 test/testdata/init_questions_device/aports/device/testing/device-lg-mako/deviceinfo delete mode 100644 test/testdata/init_questions_device/aports/device/testing/device-nonfree-firmware-and-userland/APKBUILD delete mode 100644 test/testdata/init_questions_device/aports/device/testing/device-nonfree-firmware/APKBUILD delete mode 100644 test/testdata/init_questions_device/aports/device/testing/device-nonfree-userland/APKBUILD delete mode 100644 test/testdata/init_questions_device/aports/device/testing/device-sony-amami/APKBUILD delete mode 100644 test/testdata/init_questions_device/aports/device/testing/device-wileyfox-crackling/APKBUILD delete mode 100644 test/testdata/init_questions_device/aports/main/postmarketos-ui-weston/APKBUILD delete mode 100644 test/testdata/pkgrel_bump/aports/testapp/APKBUILD delete mode 100644 test/testdata/pkgrel_bump/aports/testapp/testapp.c delete mode 100644 test/testdata/pkgrel_bump/aports/testlib/APKBUILD delete mode 100644 test/testdata/pkgrel_bump/aports/testlib/testlib.c delete mode 100644 test/testdata/pkgrel_bump/aports/testlib/testlib.h delete mode 100644 test/testdata/pkgrel_bump/aports/testsubpkg/APKBUILD delete mode 100644 test/testdata/pkgrel_bump/aports/testsubpkg/testapp.c delete mode 100644 test/testdata/pmaports.cfg delete mode 100644 test/testdata/pmb_groups/main/postmarketos-ui-test/APKBUILD delete mode 100644 test/testdata/pmb_install/big.bin delete mode 100644 test/testdata/pmb_install/binary2.bin delete mode 100644 test/testdata/pmb_install/full.bin delete mode 100644 test/testdata/pmb_install/overrun.bin delete mode 100644 test/testdata/pmb_install/small.bin 
 delete mode 100644 test/testdata/pmb_recommends/main/postmarketos-ui-test/APKBUILD
 delete mode 100644 test/testdata/pmb_recommends/main/test-app/APKBUILD
 delete mode 100644 test/testdata/version/README
 delete mode 100644 test/testdata/version/version.data

diff --git a/.b4-config b/.b4-config
deleted file mode 100644
index 2eeda0fc..00000000
--- a/.b4-config
+++ /dev/null
@@ -1,9 +0,0 @@
-# Allow this repository to be used with the 'b4' tool. See
-# https://postmarketos.org/patch-review for details.
-
-[b4]
-  midmask = https://lists.sr.ht/~postmarketos/pmbootstrap-devel/%s
-  linkmask = https://lists.sr.ht/~postmarketos/pmbootstrap-devel/%%3C%s%%3E
-  send-series-to = ~postmarketos/pmbootstrap-devel@lists.sr.ht
-  send-endpoint-web = NONE
-  backend = sourcehut
diff --git a/.build.yml b/.build.yml
deleted file mode 100644
index 292dcf99..00000000
--- a/.build.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-image: alpine/edge
-packages:
-  - sudo
-sources:
-  - https://git.sr.ht/~postmarketos/pmbootstrap
-tasks:
-  - note: |
-      pmbootstrap/.ci/note.sh
-  - shellcheck: |
-      cd pmbootstrap
-      sudo .ci/shellcheck.sh
-  - ruff: |
-      cd pmbootstrap
-      sudo .ci/ruff.sh
-  - vermin: |
-      cd pmbootstrap
-      sudo .ci/vermin.sh
-  - codespell: |
-      cd pmbootstrap
-      sudo .ci/codespell.sh
-  - pytest: |
-      cd pmbootstrap
-      sudo .ci/pytest.sh
-artifacts:
-  - ".local/var/pmbootstrap/log.txt"
-  - ".local/var/pmbootstrap/log_testsuite.txt"
diff --git a/.ci/codespell.sh b/.ci/codespell.sh
deleted file mode 100755
index 5dff389a..00000000
--- a/.ci/codespell.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh -ex
-# SPDX-License-Identifier: GPL-3.0-or-later
-# Copyright 2023 Oliver Smith
-# Description: find typos
-# https://postmarketos.org/pmb-ci
-
-if [ "$(id -u)" = 0 ]; then
-    set -x
-    apk -q add \
-        py3-codespell
-    exec su "${TESTUSER:-build}" -c "sh -e $0"
-fi
-
-set -x
-
-# -L: words to ignore
-codespell \
-    -L crate \
-    -L hda \
-    .
diff --git a/.ci/note.sh b/.ci/note.sh
deleted file mode 100755
index b5fd0cf7..00000000
--- a/.ci/note.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh -e
-
-printf "\n"
-printf "PROTIP: use"
-printf " \e[1;32mpmbootstrap ci\e[0m"
-printf " to run these scripts locally.\n"
diff --git a/.ci/pytest.sh b/.ci/pytest.sh
deleted file mode 100755
index c29cd882..00000000
--- a/.ci/pytest.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/bin/sh -e
-# Description: run pmbootstrap python testsuite
-# Options: native slow
-# https://postmarketos.org/pmb-ci
-
-if [ "$(id -u)" = 0 ]; then
-    set -x
-    apk -q add \
-        git \
-        openssl \
-        py3-pytest \
-        py3-pytest-cov \
-        sudo
-    exec su "${TESTUSER:-build}" -c "sh -e $0"
-fi
-
-# Require pytest to be installed on the host system
-if [ -z "$(command -v pytest)" ]; then
-    echo "ERROR: pytest command not found, make sure it is in your PATH."
-    exit 1
-fi
-
-# Use pytest-cov if it is installed to display code coverage
-cov_arg=""
-if python -c "import pytest_cov" >/dev/null 2>&1; then
-    cov_arg="--cov=pmb"
-fi
-
-echo "Initializing pmbootstrap..."
-if ! yes '' | ./pmbootstrap.py \
-    --details-to-stdout \
-    init \
-    >/tmp/pmb_init 2>&1; then
-    cat /tmp/pmb_init
-    exit 1
-fi
-
-# Make sure that the work folder format is up to date, and that there are no
-# mounts from aborted test cases (#1595)
-./pmbootstrap.py work_migrate
-./pmbootstrap.py -q shutdown
-
-# Make sure we have a valid device (#1128)
-device="$(./pmbootstrap.py config device)"
-pmaports="$(./pmbootstrap.py config aports)"
-deviceinfo="$(ls -1 "$pmaports"/device/*/device-"$device"/deviceinfo)"
-if ! [ -e "$deviceinfo" ]; then
-    echo "ERROR: Could not find deviceinfo file for selected device:" \
-        "$device"
-    echo "Expected path: $deviceinfo"
-    echo "Maybe you have switched to a branch where your device does not"
-    echo "exist? Use 'pmbootstrap config device qemu-amd64' to switch to"
-    echo "a valid device."
-    exit 1
-fi
-
-# Make sure pmaports is clean, some of the tests will fail otherwise
-if [ -n "$(git -C "$pmaports" status --porcelain)" ]; then
-    echo "ERROR: pmaports dir is not clean"
-    exit 1
-fi
-
-echo "Running pytest..."
-echo "NOTE: use 'pmbootstrap log' to see the detailed log if running locally."
-pytest \
-    --color=yes \
-    -vv \
-    -x \
-    $cov_arg \
-    test \
-    -m "not skip_ci" \
-    "$@"
diff --git a/.ci/ruff.sh b/.ci/ruff.sh
deleted file mode 100755
index edf721d0..00000000
--- a/.ci/ruff.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh -e
-# Description: lint all python scripts
-# https://postmarketos.org/pmb-ci
-
-if [ "$(id -u)" = 0 ]; then
-    set -x
-    apk -q add ruff
-    exec su "${TESTUSER:-build}" -c "sh -e $0"
-fi
-
-set -x
-
-# __init__.py with additional ignore:
-# F401: imported, but not used
-# shellcheck disable=SC2046
-ruff --ignore "F401" $(find . -not -path '*/venv/*' -name '__init__.py')
-
-# Check all other files
-ruff --exclude=__init__.py .
diff --git a/.ci/shellcheck.sh b/.ci/shellcheck.sh
deleted file mode 100755
index 9d07e335..00000000
--- a/.ci/shellcheck.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh -e
-# Description: lint all shell scripts
-# https://postmarketos.org/pmb-ci
-
-if [ "$(id -u)" = 0 ]; then
-    set -x
-    apk -q add shellcheck
-    exec su "${TESTUSER:-build}" -c "sh -e $0"
-fi
-
-find . -name '*.sh' |
-while read -r file; do
-    echo "shellcheck: $file"
-    shellcheck "$file"
-done
diff --git a/.ci/vermin.sh b/.ci/vermin.sh
deleted file mode 100755
index 6c8cbe68..00000000
--- a/.ci/vermin.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh -e
-# Description: verify that we don't use too new python features
-# https://postmarketos.org/pmb-ci
-
-if [ "$(id -u)" = 0 ]; then
-    set -x
-    apk -q add vermin
-    exec su "${TESTUSER:-build}" -c "sh -e $0"
-fi
-
-# shellcheck disable=SC2046
-vermin \
-    -t=3.7- \
-    --backport argparse \
-    --backport configparser \
-    --backport enum \
-    --backport typing \
-    --lint \
-    --no-parse-comments \
-    --eval-annotations \
-    $(find . -name '*.py' \
-        -a -not -path "./.venv/*" \
-        -a -not -path "./venv/*")
-
-echo "vermin check passed"
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 02ebbd6a..00000000
--- a/.gitignore
+++ /dev/null
@@ -1,120 +0,0 @@
-# pmbootstrap will clone "pmaports" and (if pmbootstrap is not installed system
-# wide) create a symlink in the aports folder
-/aports
-
-# The rest below is more or less from a default Python .gitignore
-.*.swp
-
-# Failed patches
-*.rej
-*.orig
-
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-.hypothesis/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# celery beat schedule file
-celerybeat-schedule
-
-# SageMath parsed files
-*.sage.py
-
-# dotenv
-.env
-
-# virtualenv
-.venv
-venv/
-ENV/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-
-# Visual Studio Code
-.vscode/
-
-# Pytest
-.pytest_cache
-
-# JetBrains IDEs (PyCharm, etc)
-.idea
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index 0a65c3ef..00000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,37 +0,0 @@
-## Reporting issues
-* Consider joining the [chat](https://wiki.postmarketos.org/wiki/Matrix_and_IRC) for instant help.
-* Maybe your question is answered in the [wiki](https://wiki.postmarketos.org/) somewhere. [Search](https://wiki.postmarketos.org/index.php?search=&title=Special%3ASearch&go=Go) first!
-* Otherwise, just ask what you want to know. We're happy if we can help you and glad that you're using `pmbootstrap`!
-
-## Development
-
-See pmbootstrap's [Development Guide](https://wiki.postmarketos.org/wiki/Development_guide).
-
-### Contributing code changes
-* [Fork](https://docs.gitlab.com/ee/gitlab-basics/fork-project.html) this repository, commit your changes and then make a [Merge Request](https://docs.gitlab.com/ee/workflow/merge_requests.html).
-* Please test your code before submitting a Merge Request.
-
-### Shell scripting
-* We don't write scripts for `bash`, but for `busybox`'s `ash` shell, which is POSIX compliant (plus very few features from `bash`).
-* Use `shellcheck` to test your changes for issues before submitting. There is even an [online](https://www.shellcheck.net) version.
-* We're looking into automatizing this more, some files already get checked automatically by the [static code analysis script](test/static_code_analysis.sh).
-
-### Python
-* We use the [PEP8](https://www.python.org/dev/peps/pep-0008/) standard for Python code. Don't worry, you don't need to read all that, just run the `autopep8` program on your changed code, and confirm with the [static code analysis script](test/static_code_analysis.sh) that everything is PEP8 compliant. *This script will run automatically on Travis CI when you make a Merge Request, and it must pass for your code to get accepted.*
-* We use the `reST` style for `docstrings` below functions (to comment what individual functions are doing, you'll see those when browsing through the code). Please stick to this format, and try to describe the important parameters and return values at least. Example from [here](https://stackoverflow.com/a/24385103):
-
-```Python
-"""
-This is a reST style.
-
-:param param1: this is a first param
-:param param2: this is a second param
-:returns: this is a description of what is returned
-:raises keyError: raises an exception
-"""
-```
-
-* If it is feasible for you, try to run the testsuite on code that you have changed. The `test/test_build.py` case will build full cross-compilers for `aarch64` and `armhf`, so it may take a long time.
Testcases can be started with `pytest` and it's planned to run that automatically when making a new Merge Request (see #64). - - -**If you need any help, don't hesitate to open an [issue](https://gitlab.com/postmarketOS/pmbootstrap/issues) and ask!** diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 9cecc1d4..00000000 --- a/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. 
-States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. 
- - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - {one line to give the program's name and a brief idea of what it does.} - Copyright (C) {year} {name of author} - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - {project} Copyright (C) {year} {fullname} - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 0e481d1c..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -include LICENSE -recursive-include pmb/data * diff --git a/README.md b/README.md index 0d3522b9..1195c869 100644 --- a/README.md +++ b/README.md @@ -1,286 +1,7 @@ # pmbootstrap -Sophisticated chroot/build/flash tool to develop and install -[postmarketOS](https://postmarketos.org). 
+The pmbootstrap repository has been moved: +https://postmarketos.org/source-code/ -## Development - -pmbootstrap is being developed on SourceHut -([what](https://postmarketos.org/blog/2022/07/25/considering-sourcehut/)): - -https://git.sr.ht/~postmarketos/pmbootstrap - -Send patches via mail or web UI to -[pmbootstrap-devel](https://lists.sr.ht/~postmarketos/pmbootstrap-devel) -([subscribe](mailto:~postmarketos/pmbootstrap-devel+subscribe@lists.sr.ht)): -``` -~postmarketos/pmbootstrap-devel@lists.sr.ht -``` - -You can set the default values for sending email in the git checkout -``` -$ git config sendemail.to "~postmarketos/pmbootstrap-devel@lists.sr.ht" -$ git config format.subjectPrefix "PATCH pmbootstrap" -``` - -Run CI scripts locally with: -``` -$ pmbootstrap ci -``` - -Run a single test file: -``` -$ pytest -vv ./test/test_keys.py -``` - -## Issues - -Issues are being tracked -[here](https://gitlab.com/postmarketOS/pmbootstrap/-/issues). - -## Requirements -* Linux distribution on the host system (`x86`, `x86_64`, `aarch64` or `armv7`) - * [Windows subsystem for Linux (WSL)](https://en.wikipedia.org/wiki/Windows_Subsystem_for_Linux) - does **not** work! Please use [VirtualBox](https://www.virtualbox.org/) instead. - * [Linux kernel 3.17 or higher](https://postmarketos.org/oldkernel) -* Python 3.7+ -* OpenSSL -* git -* ps -* tar - -## Usage Examples -Please refer to the [postmarketOS wiki](https://wiki.postmarketos.org) for -in-depth coverage of topics such as -[porting to a new device](https://wiki.postmarketos.org/wiki/Porting_to_a_new_device) -or [installation](https://wiki.postmarketos.org/wiki/Installation_guide). The -help output (`pmbootstrap -h`) has detailed usage instructions for every -command. Read on for some generic examples of what can be done with -`pmbootstrap`. - -### Installing pmbootstrap - - -### Basics -Initial setup: -``` -$ pmbootstrap init -``` - -Run this in a second window to see all shell commands that get executed: -``` -$ pmbootstrap log -``` - -Quick health check and config overview: -``` -$ pmbootstrap status -``` - -### Packages -Build `aports/main/hello-world`: -``` -$ pmbootstrap build hello-world -``` - -Cross-compile to `armhf`: -``` -$ pmbootstrap build --arch=armhf hello-world -``` - -Build with source code from local folder: -``` -$ pmbootstrap build linux-postmarketos-mainline --src=~/code/linux -``` - -Update checksums: -``` -$ pmbootstrap checksum hello-world -``` - -Generate a template for a new package: -``` -$ pmbootstrap newapkbuild "https://gitlab.com/postmarketOS/osk-sdl/-/archive/0.52/osk-sdl-0.52.tar.bz2" -``` - -#### Default architecture - -Packages will be compiled for the architecture of the device running -pmbootstrap by default. 
For example, if your `x86_64` PC runs pmbootstrap, it -would build a package for `x86_64` with this command: -``` -$ pmbootstrap build hello-world -``` - -If you would rather build for the target device selected in `pmbootstrap init` -by default, then use the `build_default_device_arch` option: -``` -$ pmbootstrap config build_default_device_arch True -``` - -If your target device is `pine64-pinephone` for example, pmbootstrap will now -build this package for `aarch64`: -``` -$ pmbootstrap build hello-world -``` - -### Chroots -Enter the `armhf` building chroot: -``` -$ pmbootstrap chroot -b armhf -``` - -Run a command inside a chroot: -``` -$ pmbootstrap chroot -- echo test -``` - -Safely delete all chroots: -``` -$ pmbootstrap zap -``` - -### Device Porting Assistance -Analyze Android -[`boot.img`](https://wiki.postmarketos.org/wiki/Glossary#boot.img) files (also -works with recovery OS images like TWRP): -``` -$ pmbootstrap bootimg_analyze ~/Downloads/twrp-3.2.1-0-fp2.img -``` - -Check kernel configs: -``` -$ pmbootstrap kconfig check -``` - -Edit a kernel config: -``` -$ pmbootstrap kconfig edit --arch=armhf postmarketos-mainline -``` - -### Root File System -Build the rootfs: -``` -$ pmbootstrap install -``` - -Build the rootfs with full disk encryption: -``` -$ pmbootstrap install --fde -``` - -Update existing installation on SD card: -``` -$ pmbootstrap install --disk=/dev/mmcblk0 --rsync -``` - -Run the image in QEMU: -``` -$ pmbootstrap qemu --image-size=1G -``` - -Flash to the device: -``` -$ pmbootstrap flasher flash_kernel -$ pmbootstrap flasher flash_rootfs --partition=userdata -``` - -Export the rootfs, kernel, initramfs, `boot.img` etc.: -``` -$ pmbootstrap export -``` - -Extract the initramfs -``` -$ pmbootstrap initfs extract -``` - -Build and flash Android recovery zip: -``` -$ pmbootstrap install --android-recovery-zip -$ pmbootstrap flasher --method=adb sideload -``` - -### Repository Maintenance -List pmaports that don't have a binary package: -``` -$ pmbootstrap repo_missing --arch=armhf --overview -``` - -Increase the `pkgrel` for each aport where the binary package has outdated -dependencies (e.g. after soname bumps): -``` -$ pmbootstrap pkgrel_bump --auto -``` - -Generate cross-compiler aports based on the latest version from Alpine's -aports: -``` -$ pmbootstrap aportgen gcc-armhf -``` - -Manually rebuild package index: -``` -$ pmbootstrap index -``` - -Delete local binary packages without existing aport of same version: -``` -$ pmbootstrap zap -m -``` - -### Debugging -Use `-v` on any action to get verbose logging: -``` -$ pmbootstrap -v build hello-world -``` - -Parse a single deviceinfo and return it as JSON: -``` -$ pmbootstrap deviceinfo_parse pine64-pinephone -``` - -Parse a single APKBUILD and return it as JSON: -``` -$ pmbootstrap apkbuild_parse hello-world -``` - -Parse a package from an APKINDEX and return it as JSON: -``` -$ pmbootstrap apkindex_parse $WORK/cache_apk_x86_64/APKINDEX.8b865e19.tar.gz hello-world -``` - -`ccache` statistics: -``` -$ pmbootstrap stats --arch=armhf -``` - -### Use alternative sudo - -pmbootstrap supports `doas` and `sudo`. -If multiple sudo implementations are installed, pmbootstrap will use `doas`. -You can set the `PMB_SUDO` environmental variable to define the sudo -implementation you want to use. 
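For example, to pick one implementation explicitly for a single run (an illustrative invocation based on the `PMB_SUDO` variable described above; any pmbootstrap action can be prefixed the same way):
```
$ PMB_SUDO=sudo pmbootstrap init
```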
- -### Select SSH keys to include and make authorized in new images - -If the config file option `ssh_keys` is set to `True` (it defaults to `False`), -then all files matching the glob `~/.ssh/id_*.pub` will be placed in -`~/.ssh/authorized_keys` in the user's home directory in newly-built images. - -Sometimes, for example if you have a large number of SSH keys, you may wish to -select a different set of public keys to include in an image. To do this, set -the `ssh_key_glob` configuration parameter in the pmbootstrap config file to a -string containing a glob that is to match the file or files you wish to -include. - -For example, a `~/.config/pmbootstrap.cfg` may contain: - - [pmbootstrap] - # ... - ssh_keys = True - ssh_key_glob = ~/.ssh/postmarketos-dev.pub - # ... - -## License -[GPLv3](LICENSE) +Related blog post: +https://postmarketos.org/blog/2024/01/17/moving-pmbootstrap/ diff --git a/helpers/envkernel.fish b/helpers/envkernel.fish deleted file mode 100644 index 46bc96c3..00000000 --- a/helpers/envkernel.fish +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env fish -# Copyright 2019 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later - -for arg in $argv - if not string match -q -- "--gcc6" $arg; - and not string match -q -- "--gcc4" $arg - echo "usage: source envkernel.fish" - echo "optional arguments:" - echo " --gcc4 Use GCC4 cross compiler" - echo " --gcc6 Use GCC6 cross compiler" - echo " --help Show this help message" - exit 1 - end -end - -# Fish compatibility code from envkernel.sh -set script_dir (dirname (status filename)) -sh "$script_dir/envkernel.sh" $argv --fish 1>| read -z fishcode -set pmbootstrap_dir (realpath "$script_dir/..") - -# Verbose output (enable with: 'set ENVKERNEL_FISH_VERBOSE 1') -if [ "$ENVKERNEL_FISH_VERBOSE" = "1" ] - echo "(eval code start)" - printf "$fishcode" - echo "(eval code end)" -end - -# Execute generated code -echo -e "$fishcode" | source - - -# Set prompt -if test -z "$ENVKERNEL_DISABLE_PROMPT" - functions -c fish_prompt _old_fish_prompt - - function fish_prompt - set -l old_status $status - printf "[envkernel] " - echo "exit $old_status" | . - _old_fish_prompt - end -end - -# Deactivate -function deactivate - if functions -q _old_fish_prompt - functions -e fish_prompt - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - end - functions -e make kernelroot pmbootstrap pmbroot - functions -e deactivate reactivate -end - -# Reactivate -alias reactivate "deactivate; pushd '$PWD'; . '$pmbootstrap_dir'/helpers/envkernel.fish; popd" diff --git a/helpers/envkernel.sh b/helpers/envkernel.sh deleted file mode 100644 index 19db1a33..00000000 --- a/helpers/envkernel.sh +++ /dev/null @@ -1,426 +0,0 @@ -#!/bin/sh -# Copyright 2019 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -# -# usage example: -# $ cd ~/code/linux -# $ source ~/code/pmbootstrap/helpers/envkernel.sh - -check_kernel_folder() { - [ -e "Kbuild" ] && return - echo "ERROR: This folder is not a linux source tree: $PWD" - return 1 -} - - -clean_kernel_src_dir() { - # Prevent Linux from appending Git version information to kernel version - # This will cause kernels to be packaged incorrectly. - touch .scmversion - - if [ -f ".config" ] || [ -d "include/config" ]; then - echo "Source directory is not clean, running 'make mrproper'." - - tmp_dir="" - if [ -d ".output" ]; then - echo " * Preserving existing build output." 
- tmp_dir=$(mktemp -d) - sudo mv ".output" "$tmp_dir" - fi; - - # backslash is prefixed to disable the alias - # shellcheck disable=SC1001 - \make mrproper - - if [ -n "$tmp_dir" ]; then - sudo mv "$tmp_dir/.output" ".output" - sudo rmdir "$tmp_dir" - fi; - fi; -} - - -export_pmbootstrap_dir() { - if [ -n "$pmbootstrap_dir" ]; then - return 0; - fi - - # Get pmbootstrap dir based on this script's location - # See also: - # shellcheck disable=SC3054 - if [ -n "${BASH_SOURCE[0]}" ]; then - script_dir="$(dirname "$(realpath "$BASH_SOURCE")")" - else - script_dir="$(dirname "$1")" - fi - - # Fail with debug information - # shellcheck disable=SC2155 - export pmbootstrap_dir="$(realpath "$script_dir/..")" - if ! [ -e "$pmbootstrap_dir/pmbootstrap.py" ]; then - echo "ERROR: Failed to get the script's location with your shell." - echo "Please adjust export_pmbootstrap_dir in envkernel.sh. Debug info:" - echo "\$1: $1" - echo "\$pmbootstrap_dir: $pmbootstrap_dir" - return 1 - fi -} - - -set_alias_pmbootstrap() { - pmbootstrap="$pmbootstrap_dir"/pmbootstrap.py - # shellcheck disable=SC2139 - alias pmbootstrap="\"$pmbootstrap\"" - if [ -e "${XDG_CONFIG_HOME:-$HOME/.config}"/pmbootstrap.cfg ]; then - "$pmbootstrap" work_migrate - else - echo "NOTE: First run of pmbootstrap, running 'pmbootstrap init'" - "$pmbootstrap" init - fi -} - - -export_chroot_device_deviceinfo() { - chroot="$("$pmbootstrap" config work)/chroot_native" - device="$("$pmbootstrap" config device)" - deviceinfo="$(echo "$("$pmbootstrap" config aports)"/device/*/device-"$device"/deviceinfo)" - export chroot device deviceinfo -} - - -check_device() { - [ -e "$deviceinfo" ] && return - echo "ERROR: Please select a valid device in 'pmbootstrap init'" - return 1 -} - - -initialize_chroot() { - gcc_pkgname="gcc" - if [ "$gcc6_arg" = "1" ]; then - gcc_pkgname="gcc6" - fi - if [ "$gcc4_arg" = "1" ]; then - gcc_pkgname="gcc4" - fi - - # Kernel architecture - # shellcheck disable=SC2154 - case "$deviceinfo_arch" in - aarch64*) arch="arm64" ;; - arm*) arch="arm" ;; - x86_64) arch="x86_64" ;; - x86) arch="x86" ;; - esac - - # Check if it's a cross compile - host_arch="$(uname -m)" - need_cross_compiler=1 - # Match arm* architectures - # shellcheck disable=SC3057 - arch_substr="${host_arch:0:3}" - if [ "$arch" = "$host_arch" ] || \ - { [ "$arch_substr" = "arm" ] && [ "$arch_substr" = "$arch" ]; } || \ - { [ "$arch" = "arm64" ] && [ "$host_arch" = "aarch64" ]; } || \ - { [ "$arch" = "x86" ] && [ "$host_arch" = "x86_64" ]; }; then - need_cross_compiler=0 - fi - - # Don't initialize twice - flag="$chroot/tmp/envkernel/${gcc_pkgname}_setup_done" - [ -e "$flag" ] && return - - # Install needed packages - echo "Initializing Alpine chroot (details: 'pmbootstrap log')" - - cross_binutils="" - cross_gcc="" - if [ "$need_cross_compiler" = 1 ]; then - cross_binutils="binutils-$deviceinfo_arch" - cross_gcc="$gcc_pkgname-$deviceinfo_arch" - fi - - # FIXME: Ideally we would not "guess" the dependencies here. - # It might be better to take a kernel package name as parameter - # (e.g. . envkernel.sh linux-postmarketos-mainline) - # and install its build dependencies. 
- - # shellcheck disable=SC2086,SC2154 - "$pmbootstrap" -q chroot -- apk -q add \ - abuild \ - bash \ - bc \ - binutils \ - bison \ - $cross_binutils \ - $cross_gcc \ - diffutils \ - elfutils-dev \ - findutils \ - flex \ - g++ \ - "$gcc_pkgname" \ - gmp-dev \ - linux-headers \ - openssl-dev \ - make \ - mpc1-dev \ - mpfr-dev \ - musl-dev \ - ncurses-dev \ - perl \ - py3-dt-schema \ - sed \ - yamllint \ - yaml-dev \ - xz || return 1 - - # Create /mnt/linux - sudo mkdir -p "$chroot/mnt/linux" - - # Mark as initialized - "$pmbootstrap" -q chroot -- su pmos -c \ - "mkdir /tmp/envkernel; touch /tmp/envkernel/$(basename "$flag")" -} - - -mount_kernel_source() { - if [ -e "$chroot/mnt/linux/Kbuild" ]; then - sudo umount "$chroot/mnt/linux" - fi - sudo mount --bind "$PWD" "$chroot/mnt/linux" -} - - -create_output_folder() { - [ -d "$chroot/mnt/linux/.output" ] && return - mkdir -p ".output" - "$pmbootstrap" -q chroot -- chown -R pmos:pmos "/mnt/linux/.output" -} - - -set_alias_make() { - # Cross compiler prefix - # shellcheck disable=SC1091 - prefix="$(CBUILD="$deviceinfo_arch" . "$chroot/usr/share/abuild/functions.sh"; - arch_to_hostspec "$deviceinfo_arch")" - - if [ "$gcc6_arg" = "1" ]; then - cc="gcc6-${prefix}-gcc" - hostcc="gcc6-gcc" - cross_compiler="/usr/bin/gcc6-$prefix-" - elif [ "$gcc4_arg" = "1" ]; then - cc="gcc4-${prefix}-gcc" - hostcc="gcc4-gcc" - cross_compiler="/usr/bin/gcc4-$prefix-" - else - cc="${prefix}-gcc" - hostcc="gcc" - cross_compiler="/usr/bin/$prefix-" - fi - - if [ "$arch" = "x86" ] && [ "$host_arch" = "x86_64" ]; then - cc=$hostcc - fi - - # Build make command - cmd="echo '*** pmbootstrap envkernel.sh active for $PWD! ***';" - cmd="$cmd pmbootstrap -q chroot --user --" - cmd="$cmd CCACHE_DISABLE=1" - cmd="$cmd ARCH=$arch" - if [ "$need_cross_compiler" = 1 ]; then - cmd="$cmd CROSS_COMPILE=$cross_compiler" - fi - cmd="$cmd make -C /mnt/linux O=/mnt/linux/.output" - cmd="$cmd CC=$cc HOSTCC=$hostcc" - - # shellcheck disable=SC2139 - alias make="$cmd" - unset cmd - - # Build run-script command - cmd="_run_script() {" - cmd="$cmd echo '*** pmbootstrap envkernel.sh active for $PWD! 
***';" - cmd="$cmd _script=\"\$1\";" - cmd="$cmd if [ -e \"\$_script\" ]; then" - cmd="$cmd echo \"Running \$_script in the chroot native /mnt/linux/\";" - cmd="$cmd pmbootstrap -q chroot --user -- sh -c \"cd /mnt/linux;" - cmd="$cmd srcdir=/mnt/linux builddir=/mnt/linux/.output tmpdir=/tmp/envkernel" - cmd="$cmd ./\"\$_script\"\";" - cmd="$cmd else" - cmd="$cmd echo \"ERROR: \$_script not found.\";" - cmd="$cmd fi;" - cmd="$cmd };" - cmd="$cmd _run_script \"\$@\"" - # shellcheck disable=SC2139 - alias run-script="$cmd" - unset cmd -} - - -set_alias_pmbroot_kernelroot() { - # shellcheck disable=SC2139 - alias pmbroot="cd '$pmbootstrap_dir'" - # shellcheck disable=SC2139 - alias kernelroot="cd '$PWD'" -} - - -cross_compiler_version() { - if [ "$need_cross_compiler" = 1 ]; then - "$pmbootstrap" chroot --user -- "${cross_compiler}gcc" --version \ - 2> /dev/null | grep "^.*gcc " | \ - awk -F'[()]' '{ print $1 "("$2")" }' - else - echo "none" - fi -} - - -update_prompt() { - if [ -n "$ZSH_VERSION" ]; then - # assume Zsh - export _OLD_PROMPT="$PROMPT" - export PROMPT="[envkernel] $PROMPT" - elif [ -n "$BASH_VERSION" ]; then - export _OLD_PS1="$PS1" - export PS1="[envkernel] $PS1" - fi -} - - -set_deactivate() { - cmd="_deactivate() {" - cmd="$cmd unset POSTMARKETOS_ENVKERNEL_ENABLED;" - cmd="$cmd unalias make kernelroot pmbootstrap pmbroot run-script;" - cmd="$cmd unalias deactivate reactivate;" - cmd="$cmd if [ -n \"\$_OLD_PS1\" ]; then" - cmd="$cmd export PS1=\"\$_OLD_PS1\";" - cmd="$cmd unset _OLD_PS1;" - cmd="$cmd elif [ -n \"\$_OLD_PROMPT\" ]; then" - cmd="$cmd export PROMPT=\"\$_OLD_PROMPT\";" - cmd="$cmd unset _OLD_PROMPT;" - cmd="$cmd fi" - cmd="$cmd };" - cmd="$cmd _deactivate \"\$@\"" - # shellcheck disable=SC2139 - alias deactivate="$cmd" - unset cmd -} - -set_reactivate() { - # shellcheck disable=SC2139 - alias reactivate="deactivate; pushd '$PWD'; . '$pmbootstrap_dir'/helpers/envkernel.sh; popd" -} - -check_and_deactivate() { - if [ "$POSTMARKETOS_ENVKERNEL_ENABLED" = 1 ]; then - # we already are running in envkernel - deactivate - fi -} - - -print_usage() { - # shellcheck disable=SC3054 - if [ -n "${BASH_SOURCE[0]}" ]; then - echo "usage: source $(basename "$(realpath "$BASH_SOURCE")")" - fi - echo "optional arguments:" - echo " --fish Print fish alias syntax (internally used)" - echo " --gcc6 Use GCC6 cross compiler" - echo " --gcc4 Use GCC4 cross compiler" - echo " --help Show this help message" -} - - -parse_args() { - unset fish_arg - unset gcc6_arg - unset gcc4_arg - - while [ "${1:-}" != "" ]; do - case $1 in - --fish) - fish_arg="$1" - shift - ;; - --gcc6) - gcc6_arg=1 - shift - ;; - --gcc4) - gcc4_arg=1 - shift - ;; - --help) - shift - return 0 - ;; - *) - echo "Invalid argument: $1" - shift - return 0 - ;; - esac - done - - return 1 -} - - -main() { - # Stop executing once a function fails - # shellcheck disable=SC1090 - if check_and_deactivate \ - && check_kernel_folder \ - && clean_kernel_src_dir \ - && export_pmbootstrap_dir "$1" \ - && set_alias_pmbootstrap \ - && export_chroot_device_deviceinfo \ - && check_device \ - && . "$deviceinfo" \ - && initialize_chroot \ - && mount_kernel_source \ - && create_output_folder \ - && set_alias_make \ - && set_alias_pmbroot_kernelroot \ - && update_prompt \ - && set_deactivate \ - && set_reactivate; then - - POSTMARKETOS_ENVKERNEL_ENABLED=1 - - # Success - echo "pmbootstrap envkernel.sh activated successfully." 
- echo " * kernel source: $PWD" - echo " * output folder: $PWD/.output" - echo " * architecture: $arch ($device is $deviceinfo_arch)" - echo " * cross compile: $(cross_compiler_version)" - echo " * aliases: make, kernelroot, pmbootstrap, pmbroot," \ - "run-script (see 'type make' etc.)" - echo " * run 'deactivate' to revert all env changes" - else - # Failure - echo "See also: " - return 1 - fi -} - - -# Print fish alias syntax (when called from envkernel.fish) -fish_compat() { - [ "$1" = "--fish" ] || return 0 - for name in make kernelroot pmbootstrap pmbroot; do - echo "alias $(alias $name | sed 's/=/ /')" - done -} - -if parse_args "$@"; then - print_usage "$0" - return 1 -fi - -# Run main() with all output redirected to stderr -# Afterwards print fish compatible syntax to stdout -main "$0" >&2 && fish_compat "$fish_arg" diff --git a/helpers/envsetup.sh b/helpers/envsetup.sh deleted file mode 100644 index 301db9df..00000000 --- a/helpers/envsetup.sh +++ /dev/null @@ -1,11 +0,0 @@ -#! /bin/sh -e -if [ -e "pmbootstrap.py" ]; then - PMB_PATH=$(pwd) - # shellcheck disable=SC2139 - alias pmbroot="cd \"$PMB_PATH\"" - # shellcheck disable=SC2139 - alias pmbootstrap="$PMB_PATH/pmbootstrap.py" -else - echo "ERROR: Please source this from the pmbootstrap folder." - return 1 -fi diff --git a/pmb/__init__.py b/pmb/__init__.py deleted file mode 100644 index ecdb80ca..00000000 --- a/pmb/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -# PYTHON_ARGCOMPLETE_OK -import sys -import logging -import os -import traceback - -from . import config -from . import parse -from .config import init as config_init -from .helpers import frontend -from .helpers import logging as pmb_logging -from .helpers import mount -from .helpers import other - -# pmbootstrap version -__version__ = "2.1.0" - -# Python version check -version = sys.version_info -if version < (3, 7): - print("You need at least Python 3.7 to run pmbootstrap") - print("(You are running it with Python " + str(version.major) + - "." 
+ str(version.minor) + ")") - sys.exit() - - -def main(): - # Wrap everything to display nice error messages - args = None - try: - # Parse arguments, set up logging - args = parse.arguments() - os.umask(0o22) - - # Store script invocation command - os.environ["PMBOOTSTRAP_CMD"] = sys.argv[0] - - # Sanity checks - other.check_grsec() - if not args.as_root and os.geteuid() == 0: - raise RuntimeError("Do not run pmbootstrap as root!") - - # Initialize or require config - if args.action == "init": - return config_init.frontend(args) - elif not os.path.exists(args.config): - raise RuntimeError("Please specify a config file, or run" - " 'pmbootstrap init' to generate one.") - elif not os.path.exists(args.work): - raise RuntimeError("Work path not found, please run 'pmbootstrap" - " init' to create it.") - - other.check_old_devices(args) - - # Migrate work folder if necessary - if args.action not in ["shutdown", "zap", "log"]: - other.migrate_work_folder(args) - - # Run the function with the action's name (in pmb/helpers/frontend.py) - if args.action: - getattr(frontend, args.action)(args) - else: - logging.info("Run pmbootstrap -h for usage information.") - - # Still active notice - if mount.ismount(args.work + "/chroot_native/dev"): - logging.info("NOTE: chroot is still active (use 'pmbootstrap" - " shutdown' as necessary)") - logging.info("DONE!") - - except KeyboardInterrupt: - print("\nCaught KeyboardInterrupt, exiting …") - sys.exit(130) # SIGINT(2) + 128 - - except Exception as e: - # Dump log to stdout when args (and therefore logging) init failed - if not args: - logging.getLogger().setLevel(logging.DEBUG) - - logging.info("ERROR: " + str(e)) - logging.info("See also: ") - logging.debug(traceback.format_exc()) - - # Hints about the log file (print to stdout only) - log_hint = "Run 'pmbootstrap log' for details." - if not args or not os.path.exists(args.log): - log_hint += (" Alternatively you can use '--details-to-stdout' to" - " get more output, e.g. 'pmbootstrap" - " --details-to-stdout init'.") - print() - print(log_hint) - print() - print("Before you report this error, ensure that pmbootstrap is " - "up to date.") - print("Find the latest version here:" - " https://git.sr.ht/~postmarketos/pmbootstrap/refs") - print(f"Your version: {__version__}") - return 1 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/pmb/aportgen/__init__.py b/pmb/aportgen/__init__.py deleted file mode 100644 index 897bf84b..00000000 --- a/pmb/aportgen/__init__.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging -import pmb.aportgen.busybox_static -import pmb.aportgen.device -import pmb.aportgen.gcc -import pmb.aportgen.linux -import pmb.aportgen.musl -import pmb.aportgen.grub_efi -import pmb.config -import pmb.helpers.cli - - -def get_cross_package_arches(pkgname): - """ - Get the arches for which we want to build cross packages. - :param pkgname: package name, e.g. "gcc-aarch64", "gcc-x86_64" - :returns: string of architecture(s) (space separated) - """ - if pkgname.endswith("-x86_64"): - return "aarch64" - else: - return "x86_64" - - -def properties(pkgname): - """ - Get the `pmb.config.aportgen` properties for the aport generator, based on - the pkgname prefix. 
- - Example: "musl-armhf" => ("musl", "cross", {"confirm_overwrite": False}) - - :param pkgname: package name - :returns: (prefix, folder, options) - """ - for folder, options in pmb.config.aportgen.items(): - for prefix in options["prefixes"]: - if pkgname.startswith(prefix): - return (prefix, folder, options) - logging.info("NOTE: aportgen is for generating postmarketOS specific" - " aports, such as the cross-compiler related packages" - " or the linux kernel fork packages.") - logging.info("NOTE: If you wanted to package new software in general, try" - " 'pmbootstrap newapkbuild' to generate a template.") - raise ValueError("No generator available for " + pkgname + "!") - - -def generate(args, pkgname): - if args.fork_alpine: - prefix, folder, options = (pkgname, "temp", - {"confirm_overwrite": True}) - else: - prefix, folder, options = properties(pkgname) - path_target = args.aports + "/" + folder + "/" + pkgname - - # Confirm overwrite - if options["confirm_overwrite"] and os.path.exists(path_target): - logging.warning("WARNING: Target folder already exists: " - f"{path_target}") - if not pmb.helpers.cli.confirm(args, "Continue and overwrite?"): - raise RuntimeError("Aborted.") - - if os.path.exists(args.work + "/aportgen"): - pmb.helpers.run.user(args, ["rm", "-r", args.work + "/aportgen"]) - if args.fork_alpine: - upstream = pmb.aportgen.core.get_upstream_aport(args, pkgname) - pmb.helpers.run.user(args, ["cp", "-r", upstream, - f"{args.work}/aportgen"]) - pmb.aportgen.core.rewrite(args, pkgname, replace_simple={ - "# Contributor:*": None, "# Maintainer:*": None}) - else: - # Run pmb.aportgen.PREFIX.generate() - getattr(pmb.aportgen, prefix.replace("-", "_")).generate(args, pkgname) - - # Move to the aports folder - if os.path.exists(path_target): - pmb.helpers.run.user(args, ["rm", "-r", path_target]) - pmb.helpers.run.user( - args, ["mv", args.work + "/aportgen", path_target]) - - logging.info("*** pmaport generated: " + path_target) diff --git a/pmb/aportgen/busybox_static.py b/pmb/aportgen/busybox_static.py deleted file mode 100644 index e8278b85..00000000 --- a/pmb/aportgen/busybox_static.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.aportgen.core -import pmb.build -import pmb.chroot.apk -import pmb.chroot.apk_static -import pmb.helpers.run -import pmb.parse.apkindex - - -def generate(args, pkgname): - arch = pkgname.split("-")[2] - - # Parse version from APKINDEX - package_data = pmb.parse.apkindex.package(args, "busybox") - version = package_data["version"] - pkgver = version.split("-r")[0] - pkgrel = version.split("-r")[1] - - # Prepare aportgen tempdir inside and outside of chroot - tempdir = "/tmp/aportgen" - pmb.chroot.root(args, ["rm", "-rf", tempdir]) - pmb.helpers.run.user(args, ["mkdir", "-p", f"{args.work}/aportgen", - f"{args.work}/chroot_native/{tempdir}"]) - - # Write the APKBUILD - channel_cfg = pmb.config.pmaports.read_config_channel(args) - mirrordir = channel_cfg["mirrordir_alpine"] - apkbuild_path = f"{args.work}/chroot_native/{tempdir}/APKBUILD" - apk_name = f"busybox-static-$pkgver-r$pkgrel-$_arch-{mirrordir}.apk" - with open(apkbuild_path, "w", encoding="utf-8") as handle: - apkbuild = f"""\ - # Automatically generated aport, do not edit! 
- # Generator: pmbootstrap aportgen {pkgname} - - # Stub for apkbuild-lint - if [ -z "$(type -t arch_to_hostspec)" ]; then - arch_to_hostspec() {{ :; }} - fi - - pkgname={pkgname} - pkgver={pkgver} - pkgrel={pkgrel} - - _arch="{arch}" - _mirror="{pmb.config.aportgen_mirror_alpine}" - - url="http://busybox.net" - license="GPL2" - arch="{pmb.aportgen.get_cross_package_arches(pkgname)}" - options="!check !strip" - pkgdesc="Statically linked Busybox for $_arch" - _target="$(arch_to_hostspec $_arch)" - - source=" - busybox-static-$pkgver-r$pkgrel-$_arch-{mirrordir}.apk::$_mirror/{mirrordir}/main/$_arch/busybox-static-$pkgver-r$pkgrel.apk - " - - package() {{ - mkdir -p "$pkgdir/usr/$_target" - cd "$pkgdir/usr/$_target" - tar -xf $srcdir/{apk_name} - rm .PKGINFO .SIGN.* - }} - """ - for line in apkbuild.split("\n"): - handle.write(line[12:].replace(" " * 4, "\t") + "\n") - - # Generate checksums - pmb.build.init_abuild_minimal(args) - pmb.chroot.root(args, ["chown", "-R", "pmos:pmos", tempdir]) - pmb.chroot.user(args, ["abuild", "checksum"], working_dir=tempdir) - pmb.helpers.run.user(args, ["cp", apkbuild_path, f"{args.work}/aportgen"]) diff --git a/pmb/aportgen/core.py b/pmb/aportgen/core.py deleted file mode 100644 index 30ee94c9..00000000 --- a/pmb/aportgen/core.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import fnmatch -import logging -import re -import glob -import pmb.helpers.git - - -def indent_size(line): - """ - Number of spaces at the beginning of a string. - """ - matches = re.findall("^[ ]*", line) - if len(matches) == 1: - return len(matches[0]) - return 0 - - -def format_function(name, body, remove_indent=4): - """ - Format the body of a shell function passed to rewrite() below, so it fits - the format of the original APKBUILD. - :param remove_indent: Maximum number of spaces to remove from the - beginning of each line of the function body. - """ - tab_width = 4 - ret = "" - lines = body.split("\n") - for i in range(len(lines)): - line = lines[i] - if not line.strip(): - if not ret or i == len(lines) - 1: - continue - - # Remove indent - spaces = min(indent_size(line), remove_indent) - line = line[spaces:] - - # Convert spaces to tabs - spaces = indent_size(line) - tabs = int(spaces / tab_width) - line = ("\t" * tabs) + line[spaces:] - - ret += line + "\n" - return name + "() {\n" + ret + "}\n" - - -def rewrite(args, pkgname, path_original="", fields={}, replace_pkgname=None, - replace_functions={}, replace_simple={}, below_header="", - remove_indent=4): - """ - Append a header to $WORK/aportgen/APKBUILD, delete maintainer/contributor - lines (so they won't be bugged with issues regarding our generated aports), - and add reference to the original aport. - - :param path_original: The original path of the automatically generated - aport. - :param fields: key-value pairs of fields that shall be changed in the - APKBUILD. For example: {"pkgdesc": "my new package", "subpkgs": ""} - :param replace_pkgname: When set, $pkgname gets replaced with that string - in every line. - :param replace_functions: Function names and new bodies, for example: - {"build": "return 0"} - The body can also be None (deletes the function) - :param replace_simple: Lines that fnmatch the pattern, get - replaced/deleted. Example: {"*test*": "# test", "*mv test.bin*": None} - :param below_header: String that gets directly placed below the header. 
- :param remove_indent: Number of spaces to remove from function body - provided to replace_functions. - - """ - # Header - if path_original: - lines_new = [ - "# Automatically generated aport, do not edit!\n", - "# Generator: pmbootstrap aportgen " + pkgname + "\n", - "# Based on: " + path_original + "\n", - "\n", - ] - else: - lines_new = [ - "# Forked from Alpine INSERT-REASON-HERE (CHANGEME!)\n", - "\n", - ] - - if below_header: - for line in below_header.split("\n"): - if not line[:8].strip(): - line = line[8:] - lines_new += line.rstrip() + "\n" - - # Copy/modify lines, skip Maintainer/Contributor - path = args.work + "/aportgen/APKBUILD" - with open(path, "r+", encoding="utf-8") as handle: - skip_in_func = False - for line in handle.readlines(): - # Skip maintainer/contributor - if line.startswith("# Maintainer") or line.startswith( - "# Contributor"): - continue - - # Replace functions - if skip_in_func: - if line.startswith("}"): - skip_in_func = False - continue - else: - for func, body in replace_functions.items(): - if line.startswith(func + "() {"): - skip_in_func = True - if body: - lines_new += format_function( - func, body, remove_indent=remove_indent) - break - if skip_in_func: - continue - - # Replace fields - for key, value in fields.items(): - if line.startswith(key + "="): - if value: - if key in ["pkgname", "pkgver", "pkgrel"]: - # No quotes to avoid lint error - line = f"{key}={value}\n" - else: - line = f'{key}="{value}"\n' - else: - # Remove line without value to avoid lint error - line = "" - break - - # Replace $pkgname - if replace_pkgname and "$pkgname" in line: - line = line.replace("$pkgname", replace_pkgname) - - # Replace simple - for pattern, replacement in replace_simple.items(): - if fnmatch.fnmatch(line, pattern + "\n"): - line = replacement - if replacement: - line += "\n" - break - if line is None: - continue - - lines_new.append(line) - - # Write back - handle.seek(0) - handle.write("".join(lines_new)) - handle.truncate() - - -def get_upstream_aport(args, pkgname, arch=None): - """ - Perform a git checkout of Alpine's aports and get the path to the aport. - - :param pkgname: package name - :param arch: Alpine architecture (e.g. "armhf"), defaults to native arch - :returns: absolute path on disk where the Alpine aport is checked out - example: /opt/pmbootstrap_work/cache_git/aports/upstream/main/gcc - """ - # APKBUILD - pmb.helpers.git.clone(args, "aports_upstream") - aports_upstream_path = args.work + "/cache_git/aports_upstream" - - # Checkout branch - channel_cfg = pmb.config.pmaports.read_config_channel(args) - branch = channel_cfg["branch_aports"] - logging.info(f"Checkout aports.git branch: {branch}") - if pmb.helpers.run.user(args, ["git", "checkout", branch], - aports_upstream_path, check=False): - logging.info("NOTE: run 'pmbootstrap pull' and try again") - logging.info("NOTE: if it still fails, your aports.git was cloned with" - " an older version of pmbootstrap, as shallow clone." 
- " Unshallow it, or remove it and let pmbootstrap clone it" - f" again: {aports_upstream_path}") - raise RuntimeError("Branch checkout failed.") - - # Search package - paths = glob.glob(aports_upstream_path + "/*/" + pkgname) - if len(paths) > 1: - raise RuntimeError("Package " + pkgname + " found in multiple" - " aports subfolders.") - elif len(paths) == 0: - raise RuntimeError("Package " + pkgname + " not found in alpine" - " aports repository.") - aport_path = paths[0] - - # Parse APKBUILD - apkbuild = pmb.parse.apkbuild(f"{aport_path}/APKBUILD", - check_pkgname=False) - apkbuild_version = apkbuild["pkgver"] + "-r" + apkbuild["pkgrel"] - - # Binary package - split = aport_path.split("/") - repo = split[-2] - pkgname = split[-1] - index_path = pmb.helpers.repo.alpine_apkindex_path(args, repo, arch) - package = pmb.parse.apkindex.package(args, pkgname, indexes=[index_path]) - - # Compare version (return when equal) - compare = pmb.parse.version.compare(apkbuild_version, package["version"]) - - # APKBUILD > binary: this is fine - if compare == 1: - logging.info(f"NOTE: {pkgname} {arch} binary package has a lower" - f" version {package['version']} than the APKBUILD" - f" {apkbuild_version}") - return aport_path - - # APKBUILD < binary: aports.git is outdated - if compare == -1: - logging.warning("WARNING: Package '" + pkgname + "' has a lower version in" - " local checkout of Alpine's aports (" + apkbuild_version + - ") compared to Alpine's binary package (" + - package["version"] + ")!") - logging.info("NOTE: You can update your local checkout with: 'pmbootstrap pull'") - - return aport_path diff --git a/pmb/aportgen/device.py b/pmb/aportgen/device.py deleted file mode 100644 index 692be811..00000000 --- a/pmb/aportgen/device.py +++ /dev/null @@ -1,331 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import pmb.helpers.cli -import pmb.helpers.run -import pmb.aportgen.core -import pmb.parse.apkindex -import pmb.parse.bootimg - - -def ask_for_architecture(): - architectures = pmb.config.build_device_architectures - # Don't show armhf, new ports shouldn't use this architecture - if "armhf" in architectures: - architectures.remove("armhf") - while True: - ret = pmb.helpers.cli.ask("Device architecture", architectures, - "aarch64", complete=architectures) - if ret in architectures: - return ret - logging.fatal("ERROR: Invalid architecture specified. If you want to" - " add a new architecture, edit" - " build_device_architectures in" - " pmb/config/__init__.py.") - - -def ask_for_manufacturer(): - logging.info("Who produced the device (e.g. LG)?") - return pmb.helpers.cli.ask("Manufacturer", None, None, False) - - -def ask_for_name(manufacturer): - logging.info("What is the official name (e.g. Google Nexus 5)?") - ret = pmb.helpers.cli.ask("Name", None, None, False) - - # Always add the manufacturer - if not ret.startswith(manufacturer) and \ - not ret.startswith("Google"): - ret = manufacturer + " " + ret - return ret - - -def ask_for_year(): - # Regex from https://stackoverflow.com/a/12240826 - logging.info("In what year was the device released (e.g. 
2012)?") - return pmb.helpers.cli.ask("Year", None, None, False, - validation_regex=r'^[1-9]\d{3,}$') - - -def ask_for_chassis(): - types = pmb.config.deviceinfo_chassis_types - - logging.info("What type of device is it?") - logging.info("Valid types are: " + ", ".join(types)) - return pmb.helpers.cli.ask("Chassis", None, None, True, - validation_regex='|'.join(types), - complete=types) - - -def ask_for_keyboard(args): - return pmb.helpers.cli.confirm(args, "Does the device have a hardware" - " keyboard?") - - -def ask_for_external_storage(args): - return pmb.helpers.cli.confirm(args, "Does the device have a sdcard or" - " other external storage medium?") - - -def ask_for_flash_method(): - while True: - logging.info("Which flash method does the device support?") - method = pmb.helpers.cli.ask("Flash method", - pmb.config.flash_methods, - "none", - complete=pmb.config.flash_methods) - - if method in pmb.config.flash_methods: - if method == "heimdall": - heimdall_types = ["isorec", "bootimg"] - while True: - logging.info("Does the device use the \"isolated" - " recovery\" or boot.img?") - logging.info("") - heimdall_type = pmb.helpers.cli.ask("Type", - heimdall_types, - heimdall_types[0]) - if heimdall_type in heimdall_types: - method += "-" + heimdall_type - break - logging.fatal("ERROR: Invalid type specified.") - return method - - logging.fatal("ERROR: Invalid flash method specified. If you want to" - " add a new flash method, edit flash_methods in" - " pmb/config/__init__.py.") - - -def ask_for_bootimg(args): - logging.info("You can analyze a known working boot.img file to" - " automatically fill out the flasher information for your" - " deviceinfo file. Either specify the path to an image or" - " press return to skip this step (you can do it later with" - " 'pmbootstrap bootimg_analyze').") - - while True: - response = pmb.helpers.cli.ask("Path", None, "", False) - path = os.path.expanduser(response) - if not path: - return None - try: - return pmb.parse.bootimg(args, path) - except Exception as e: - logging.fatal("ERROR: " + str(e) + ". 
Please try again.") - - -def generate_deviceinfo_fastboot_content(bootimg=None): - if bootimg is None: - bootimg = {"cmdline": "", - "qcdt": "false", - "dtb_second": "false", - "base": "", - "kernel_offset": "", - "ramdisk_offset": "", - "second_offset": "", - "tags_offset": "", - "pagesize": "2048", - "mtk_label_kernel": "", - "mtk_label_ramdisk": ""} - - content = f"""\ - deviceinfo_kernel_cmdline="{bootimg["cmdline"]}" - deviceinfo_generate_bootimg="true" - deviceinfo_bootimg_qcdt="{bootimg["qcdt"]}" - deviceinfo_bootimg_dtb_second="{bootimg["dtb_second"]}" - deviceinfo_flash_pagesize="{bootimg["pagesize"]}" - """ - - if "mtk_label_kernel" in bootimg.keys(): - content += f"""\ - deviceinfo_mtk_label_kernel="{bootimg["mtk_label_kernel"]}" - """ - if "mtk_label_ramdisk" in bootimg.keys(): - content += f"""\ - deviceinfo_mtk_label_ramdisk="{bootimg["mtk_label_ramdisk"]}" - """ - - if "header_version" in bootimg.keys(): - content += f"""\ - deviceinfo_header_version="{bootimg["header_version"]}" - """ - - if bootimg["header_version"] == "2": - content += f"""\ - deviceinfo_append_dtb="false" - deviceinfo_flash_offset_dtb="{bootimg["dtb_offset"]}" - """ - - if "base" in bootimg.keys(): - content += f"""\ - deviceinfo_flash_offset_base="{bootimg["base"]}" - deviceinfo_flash_offset_kernel="{bootimg["kernel_offset"]}" - deviceinfo_flash_offset_ramdisk="{bootimg["ramdisk_offset"]}" - deviceinfo_flash_offset_second="{bootimg["second_offset"]}" - deviceinfo_flash_offset_tags="{bootimg["tags_offset"]}" - """ - - return content - - -def generate_deviceinfo(args, pkgname, name, manufacturer, year, arch, - chassis, has_keyboard, has_external_storage, - flash_method, bootimg=None): - codename = "-".join(pkgname.split("-")[1:]) - external_storage = "true" if has_external_storage else "false" - # Note: New variables must be added to pmb/config/__init__.py as well - content = f"""\ - # Reference: - # Please use double quotes only. You can source this file in shell - # scripts. 
- - deviceinfo_format_version="0" - deviceinfo_name="{name}" - deviceinfo_manufacturer="{manufacturer}" - deviceinfo_codename="{codename}" - deviceinfo_year="{year}" - deviceinfo_dtb="" - deviceinfo_arch="{arch}" - - # Device related - deviceinfo_chassis="{chassis}" - deviceinfo_keyboard="{"true" if has_keyboard else "false"}" - deviceinfo_external_storage="{external_storage}" - deviceinfo_screen_width="800" - deviceinfo_screen_height="600" - - # Bootloader related - deviceinfo_flash_method="{flash_method}" - """ - - content_heimdall_bootimg = """\ - deviceinfo_flash_heimdall_partition_kernel="" - deviceinfo_flash_heimdall_partition_rootfs="" - """ - - content_heimdall_isorec = """\ - deviceinfo_flash_heimdall_partition_kernel="" - deviceinfo_flash_heimdall_partition_initfs="" - deviceinfo_flash_heimdall_partition_rootfs="" - """ - - content_0xffff = """\ - deviceinfo_generate_legacy_uboot_initfs="true" - """ - - content_uuu = """\ - deviceinfo_generate_legacy_uboot_initfs="true" - """ - - if flash_method == "fastboot": - content += generate_deviceinfo_fastboot_content(bootimg) - elif flash_method == "heimdall-bootimg": - content += generate_deviceinfo_fastboot_content(bootimg) - content += content_heimdall_bootimg - elif flash_method == "heimdall-isorec": - content += content_heimdall_isorec - elif flash_method == "0xffff": - content += content_0xffff - elif flash_method == "uuu": - content += content_uuu - - # Write to file - pmb.helpers.run.user(args, ["mkdir", "-p", args.work + "/aportgen"]) - path = args.work + "/aportgen/deviceinfo" - with open(path, "w", encoding="utf-8") as handle: - for line in content.rstrip().split("\n"): - handle.write(line.lstrip() + "\n") - - -def generate_modules_initfs(args): - content = """\ - # Remove this file if unnecessary (CHANGEME!) - # This file shall contain a list of modules to be included in the initramfs, - # so that they are available in early boot stages. In general, it should - # include modules to support unlocking FDE (touchscreen, panel, etc), - # USB networking, and telnet in the debug-shell. - # The format is one module name per line. Lines starting with the character - # '#', and empty lines are ignored. If there are multiple kernel variants - # with different initramfs module requirements, one modules-initfs.$variant - # file should be created for each of them. 
- """ - - # Write to file - pmb.helpers.run.user(args, ["mkdir", "-p", args.work + "/aportgen"]) - path = args.work + "/aportgen/modules-initfs" - with open(path, "w", encoding="utf-8") as handle: - for line in content.rstrip().split("\n"): - handle.write(line.lstrip() + "\n") - - -def generate_apkbuild(args, pkgname, name, arch, flash_method): - # Dependencies - depends = ["postmarketos-base", - "linux-" + "-".join(pkgname.split("-")[1:])] - if flash_method in ["fastboot", "heimdall-bootimg"]: - depends.append("mkbootimg") - if flash_method == "0xffff": - depends.append("uboot-tools") - - # Whole APKBUILD - depends.sort() - depends = ("\n" + " " * 12).join(depends) - content = f"""\ - # Reference: - pkgname={pkgname} - pkgdesc="{name}" - pkgver=0.1 - pkgrel=0 - url="https://postmarketos.org" - license="MIT" - arch="{arch}" - options="!check !archcheck" - depends=" - {depends} - " - makedepends="devicepkg-dev" - source=" - deviceinfo - modules-initfs - " - - build() {{ - devicepkg_build $startdir $pkgname - }} - - package() {{ - devicepkg_package $startdir $pkgname - }} - - sha512sums="(run 'pmbootstrap checksum {pkgname}' to fill)" - """ - - # Write the file - pmb.helpers.run.user(args, ["mkdir", "-p", args.work + "/aportgen"]) - path = args.work + "/aportgen/APKBUILD" - with open(path, "w", encoding="utf-8") as handle: - for line in content.rstrip().split("\n"): - handle.write(line[8:].replace(" " * 4, "\t") + "\n") - - -def generate(args, pkgname): - arch = ask_for_architecture() - manufacturer = ask_for_manufacturer() - name = ask_for_name(manufacturer) - year = ask_for_year() - chassis = ask_for_chassis() - has_keyboard = ask_for_keyboard(args) - has_external_storage = ask_for_external_storage(args) - flash_method = ask_for_flash_method() - bootimg = None - if flash_method in ["fastboot", "heimdall-bootimg"]: - bootimg = ask_for_bootimg(args) - - generate_deviceinfo(args, pkgname, name, manufacturer, year, arch, - chassis, has_keyboard, has_external_storage, - flash_method, bootimg) - generate_modules_initfs(args) - generate_apkbuild(args, pkgname, name, arch, flash_method) diff --git a/pmb/aportgen/gcc.py b/pmb/aportgen/gcc.py deleted file mode 100644 index c1422f91..00000000 --- a/pmb/aportgen/gcc.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.aportgen.core -import pmb.helpers.git -import pmb.helpers.run - - -def generate(args, pkgname): - # Copy original aport - prefix = pkgname.split("-")[0] - arch = pkgname.split("-")[1] - if prefix == "gcc": - upstream = pmb.aportgen.core.get_upstream_aport(args, "gcc", arch) - based_on = "main/gcc (from Alpine)" - elif prefix == "gcc4": - upstream = f"{args.aports}/main/gcc4" - based_on = "main/gcc4 (from postmarketOS)" - elif prefix == "gcc6": - upstream = f"{args.aports}/main/gcc6" - based_on = "main/gcc6 (from postmarketOS)" - else: - raise ValueError(f"Invalid prefix '{prefix}', expected gcc, gcc4 or" - " gcc6.") - pmb.helpers.run.user(args, ["cp", "-r", upstream, f"{args.work}/aportgen"]) - - # Rewrite APKBUILD - fields = { - "pkgname": pkgname, - "pkgdesc": f"Stage2 cross-compiler for {arch}", - "arch": pmb.aportgen.get_cross_package_arches(pkgname), - "depends": f"binutils-{arch} mpc1", - "makedepends_build": "gcc g++ bison flex texinfo gawk zip" - " gmp-dev mpfr-dev mpc1-dev zlib-dev", - "makedepends_host": "linux-headers gmp-dev mpfr-dev mpc1-dev isl-dev" - f" zlib-dev musl-dev-{arch} binutils-{arch}", - "subpackages": "", - - # gcc6: options is already there, 
so we need to replace it and not only - # set it below the header like done below. - "options": "!strip", - - "LIBGOMP": "false", - "LIBGCC": "false", - "LIBATOMIC": "false", - "LIBITM": "false", - } - - # Latest gcc only, not gcc4 and gcc6 - if prefix == "gcc": - fields["subpackages"] = f"g++-{arch}:gpp" \ - f" libstdc++-dev-{arch}:libcxx_dev" - - below_header = "CTARGET_ARCH=" + arch + """ - CTARGET="$(arch_to_hostspec ${CTARGET_ARCH})" - LANG_D=false - LANG_OBJC=false - LANG_JAVA=false - LANG_GO=false - LANG_FORTRAN=false - LANG_ADA=false - options="!strip" - - # abuild doesn't try to tries to install "build-base-$CTARGET_ARCH" - # when this variable matches "no*" - BOOTSTRAP="nobuildbase" - - # abuild will only cross compile when this variable is set, but it - # needs to find a valid package database in there for dependency - # resolving, so we set it to /. - CBUILDROOT="/" - - _cross_configure="--disable-bootstrap --with-sysroot=/usr/$CTARGET" - """ - - replace_simple = { - # Do not package libstdc++, do not add "g++-$ARCH" here (already - # did that explicitly in the subpackages variable above, so - # pmbootstrap picks it up properly). - '*subpackages="$subpackages libstdc++:libcxx:*': None, - - # We set the cross_configure variable at the beginning, so it does not - # use CBUILDROOT as sysroot. In the original APKBUILD this is a local - # variable, but we make it a global one. - '*_cross_configure=*': None, - - # Do not build foreign arch libgcc, we use the one from Alpine (#2168) - '_libgcc=true*': '_libgcc=false', - } - - pmb.aportgen.core.rewrite(args, pkgname, based_on, fields, - replace_simple=replace_simple, - below_header=below_header) diff --git a/pmb/aportgen/grub_efi.py b/pmb/aportgen/grub_efi.py deleted file mode 100644 index dde5dcfb..00000000 --- a/pmb/aportgen/grub_efi.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2023 Nick Reitemeyer, Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.aportgen.core -import pmb.build -import pmb.chroot.apk -import pmb.chroot.apk_static -import pmb.helpers.run -import pmb.parse.apkindex - - -def generate(args, pkgname): - arch = "x86" - if pkgname != "grub-efi-x86": - raise RuntimeError("only grub-efi-x86 is available") - package_data = pmb.parse.apkindex.package(args, "grub") - version = package_data["version"] - pkgver = version.split("-r")[0] - pkgrel = version.split("-r")[1] - - # Prepare aportgen tempdir inside and outside of chroot - tempdir = "/tmp/aportgen" - pmb.chroot.root(args, ["rm", "-rf", tempdir]) - pmb.helpers.run.user(args, ["mkdir", "-p", f"{args.work}/aportgen", - f"{args.work}/chroot_native/{tempdir}"]) - - # Write the APKBUILD - channel_cfg = pmb.config.pmaports.read_config_channel(args) - mirrordir = channel_cfg["mirrordir_alpine"] - apkbuild_path = f"{args.work}/chroot_native/{tempdir}/APKBUILD" - apk_name = f'"$srcdir/grub-efi-$pkgver-r$pkgrel-$_arch-{mirrordir}.apk"' - with open(apkbuild_path, "w", encoding="utf-8") as handle: - apkbuild = f"""\ - # Automatically generated aport, do not edit! 
- # Generator: pmbootstrap aportgen {pkgname} - - pkgname={pkgname} - pkgver={pkgver} - pkgrel={pkgrel} - - _arch="{arch}" - _mirror="{pmb.config.aportgen_mirror_alpine}" - - pkgdesc="GRUB $_arch EFI files for every architecture" - url="https://www.gnu.org/software/grub/" - license="GPL-3.0-or-later" - arch="{pmb.config.arch_native}" - source="grub-efi-$pkgver-r$pkgrel-$_arch-{mirrordir}.apk::$_mirror/{mirrordir}/main/$_arch/grub-efi-$pkgver-r$pkgrel.apk" - - package() {{ - mkdir -p "$pkgdir" - cd "$pkgdir" - tar -xf {apk_name} - rm .PKGINFO .SIGN.* - }} - """ - for line in apkbuild.split("\n"): - handle.write(line[12:].replace(" " * 4, "\t") + "\n") - - # Generate checksums - pmb.build.init_abuild_minimal(args) - pmb.chroot.root(args, ["chown", "-R", "pmos:pmos", tempdir]) - pmb.chroot.user(args, ["abuild", "checksum"], working_dir=tempdir) - pmb.helpers.run.user(args, ["cp", apkbuild_path, f"{args.work}/aportgen"]) diff --git a/pmb/aportgen/linux.py b/pmb/aportgen/linux.py deleted file mode 100644 index e8959584..00000000 --- a/pmb/aportgen/linux.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.helpers.run -import pmb.aportgen.core -import pmb.parse.apkindex -import pmb.parse.arch - - -def generate_apkbuild(args, pkgname, deviceinfo, patches): - device = "-".join(pkgname.split("-")[1:]) - carch = pmb.parse.arch.alpine_to_kernel(deviceinfo["arch"]) - - makedepends = ["bash", "bc", "bison", "devicepkg-dev", "findutils", "flex", - "openssl-dev", "perl"] - - build = """ - unset LDFLAGS - make O="$_outdir" ARCH="$_carch" CC="${CC:-gcc}" \\ - KBUILD_BUILD_VERSION="$((pkgrel + 1 ))-postmarketOS\"""" - - package = """ - downstreamkernel_package "$builddir" "$pkgdir" "$_carch\" \\ - "$_flavor" "$_outdir\"""" - - if deviceinfo.get("header_version") == "2": - package += """ - - make dtbs_install O="$_outdir" ARCH="$_carch" \\ - INSTALL_DTBS_PATH="$pkgdir\"/boot/dtbs""" - - if deviceinfo["bootimg_qcdt"] == "true": - build += """\n - # Master DTB (deviceinfo_bootimg_qcdt)""" - vendors = ["spreadtrum", "exynos", "other"] - soc_vendor = pmb.helpers.cli.ask("SoC vendor", vendors, - vendors[-1], complete=vendors) - if soc_vendor == "spreadtrum": - makedepends.append("dtbtool-sprd") - build += """ - dtbTool-sprd -p "$_outdir/scripts/dtc/" \\ - -o "$_outdir/arch/$_carch/boot"/dt.img \\ - "$_outdir/arch/$_carch/boot/dts/\"""" - elif soc_vendor == "exynos": - codename = "-".join(pkgname.split("-")[2:]) - makedepends.append("dtbtool-exynos") - build += """ - dtbTool-exynos -o "$_outdir/arch/$_carch/boot"/dt.img \\ - $(find "$_outdir/arch/$_carch/boot/dts/\"""" - build += f" -name *{codename}*.dtb)" - else: - makedepends.append("dtbtool") - build += """ - dtbTool -o "$_outdir/arch/$_carch/boot"/dt.img \\ - "$_outdir/arch/$_carch/boot/\"""" - package += """ - install -Dm644 "$_outdir/arch/$_carch/boot"/dt.img \\ - "$pkgdir"/boot/dt.img""" - - makedepends.sort() - makedepends = ("\n" + " " * 12).join(makedepends) - patches = ("\n" + " " * 12).join(patches) - content = f"""\ - # Reference: - # Kernel config based on: arch/{carch}/configs/(CHANGEME!) 
- - pkgname={pkgname} - pkgver=3.x.x - pkgrel=0 - pkgdesc="{deviceinfo["name"]} kernel fork" - arch="{deviceinfo["arch"]}" - _carch="{carch}" - _flavor="{device}" - url="https://kernel.org" - license="GPL-2.0-only" - options="!strip !check !tracedeps pmb:cross-native" - makedepends=" - {makedepends} - " - - # Source - _repository="(CHANGEME!)" - _commit="ffffffffffffffffffffffffffffffffffffffff" - _config="config-$_flavor.$arch" - source=" - $pkgname-$_commit.tar.gz::https://github.com/LineageOS/$_repository/archive/$_commit.tar.gz - $_config - {patches} - " - builddir="$srcdir/$_repository-$_commit" - _outdir="out" - - prepare() {{ - default_prepare - . downstreamkernel_prepare - }} - - build() {{{build} - }} - - package() {{{package} - }} - - sha512sums="(run 'pmbootstrap checksum {pkgname}' to fill)" - """ - - # Write the file - with open(f"{args.work}/aportgen/APKBUILD", "w", encoding="utf-8") as hndl: - for line in content.rstrip().split("\n"): - hndl.write(line[8:].replace(" " * 4, "\t") + "\n") - - -def generate(args, pkgname): - device = "-".join(pkgname.split("-")[1:]) - deviceinfo = pmb.parse.deviceinfo(args, device) - - # Symlink commonly used patches - pmb.helpers.run.user(args, ["mkdir", "-p", args.work + "/aportgen"]) - patches = [ - "gcc7-give-up-on-ilog2-const-optimizations.patch", - "gcc8-fix-put-user.patch", - "gcc10-extern_YYLOC_global_declaration.patch", - "kernel-use-the-gnu89-standard-explicitly.patch", - ] - for patch in patches: - pmb.helpers.run.user(args, ["ln", "-s", - "../../.shared-patches/linux/" + patch, - args.work + "/aportgen/" + patch]) - - generate_apkbuild(args, pkgname, deviceinfo, patches) diff --git a/pmb/aportgen/musl.py b/pmb/aportgen/musl.py deleted file mode 100644 index 8a8e2fc8..00000000 --- a/pmb/aportgen/musl.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.aportgen.core -import pmb.build -import pmb.chroot.apk -import pmb.chroot.apk_static -import pmb.helpers.run -import pmb.parse.apkindex - - -def generate(args, pkgname): - arch = pkgname.split("-")[1] - - # Parse musl version from APKINDEX - package_data = pmb.parse.apkindex.package(args, "musl") - version = package_data["version"] - pkgver = version.split("-r")[0] - pkgrel = version.split("-r")[1] - - # Prepare aportgen tempdir inside and outside of chroot - tempdir = "/tmp/aportgen" - pmb.chroot.root(args, ["rm", "-rf", tempdir]) - pmb.helpers.run.user(args, ["mkdir", "-p", f"{args.work}/aportgen", - f"{args.work}/chroot_native/{tempdir}"]) - - # Write the APKBUILD - channel_cfg = pmb.config.pmaports.read_config_channel(args) - mirrordir = channel_cfg["mirrordir_alpine"] - apkbuild_path = f"{args.work}/chroot_native/{tempdir}/APKBUILD" - apk_name = f"$srcdir/musl-$pkgver-r$pkgrel-$_arch-{mirrordir}.apk" - apk_dev_name = f"$srcdir/musl-dev-$pkgver-r$pkgrel-$_arch-{mirrordir}.apk" - with open(apkbuild_path, "w", encoding="utf-8") as handle: - apkbuild = f"""\ - # Automatically generated aport, do not edit! 
- # Generator: pmbootstrap aportgen {pkgname} - - # Stub for apkbuild-lint - if [ -z "$(type -t arch_to_hostspec)" ]; then - arch_to_hostspec() {{ :; }} - fi - - pkgname={pkgname} - pkgver={pkgver} - pkgrel={pkgrel} - arch="{pmb.aportgen.get_cross_package_arches(pkgname)}" - subpackages="musl-dev-{arch}:package_dev" - - _arch="{arch}" - _mirror="{pmb.config.aportgen_mirror_alpine}" - - url="https://musl-libc.org" - license="MIT" - options="!check !strip" - pkgdesc="the musl library (lib c) implementation for $_arch" - - _target="$(arch_to_hostspec $_arch)" - - source=" - musl-$pkgver-r$pkgrel-$_arch-{mirrordir}.apk::$_mirror/{mirrordir}/main/$_arch/musl-$pkgver-r$pkgrel.apk - musl-dev-$pkgver-r$pkgrel-$_arch-{mirrordir}.apk::$_mirror/{mirrordir}/main/$_arch/musl-dev-$pkgver-r$pkgrel.apk - " - - package() {{ - mkdir -p "$pkgdir/usr/$_target" - cd "$pkgdir/usr/$_target" - # Use 'busybox tar' to avoid 'tar: Child returned status 141' - # on some machines (builds.sr.ht, gitlab-ci). See pmaports#26. - busybox tar -xf {apk_name} - rm .PKGINFO .SIGN.* - }} - package_dev() {{ - mkdir -p "$subpkgdir/usr/$_target" - cd "$subpkgdir/usr/$_target" - # Use 'busybox tar' to avoid 'tar: Child returned status 141' - # on some machines (builds.sr.ht, gitlab-ci). See pmaports#26. - busybox tar -xf {apk_dev_name} - rm .PKGINFO .SIGN.* - - # symlink everything from /usr/$_target/usr/* - # to /usr/$_target/* so the cross-compiler gcc does not fail - # to build. - for _dir in include lib; do - mkdir -p "$subpkgdir/usr/$_target/$_dir" - cd "$subpkgdir/usr/$_target/usr/$_dir" - for i in *; do - cd "$subpkgdir/usr/$_target/$_dir" - ln -s /usr/$_target/usr/$_dir/$i $i - done - done - }} - """ - for line in apkbuild.split("\n"): - handle.write(line[12:].replace(" " * 4, "\t") + "\n") - - # Generate checksums - pmb.build.init_abuild_minimal(args) - pmb.chroot.root(args, ["chown", "-R", "pmos:pmos", tempdir]) - pmb.chroot.user(args, ["abuild", "checksum"], working_dir=tempdir) - pmb.helpers.run.user(args, ["cp", apkbuild_path, f"{args.work}/aportgen"]) diff --git a/pmb/build/__init__.py b/pmb/build/__init__.py deleted file mode 100644 index fb764e52..00000000 --- a/pmb/build/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -from pmb.build.init import init, init_abuild_minimal, init_compiler -from pmb.build.envkernel import package_kernel -from pmb.build.kconfig import menuconfig -from pmb.build.newapkbuild import newapkbuild -from pmb.build.other import copy_to_buildpath, is_necessary, \ - index_repo -from pmb.build._package import mount_pmaports, package diff --git a/pmb/build/_package.py b/pmb/build/_package.py deleted file mode 100644 index fbe48e08..00000000 --- a/pmb/build/_package.py +++ /dev/null @@ -1,518 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import datetime -import logging -import os - -import pmb.build -import pmb.build.autodetect -import pmb.chroot -import pmb.chroot.apk -import pmb.helpers.pmaports -import pmb.helpers.repo -import pmb.parse -import pmb.parse.arch - - -def skip_already_built(pkgname, arch): - """ - Check if the package was already built in this session, and add it - to the cache in case it was not built yet. 
- - :returns: True when it can be skipped or False - """ - if arch not in pmb.helpers.other.cache["built"]: - pmb.helpers.other.cache["built"][arch] = [] - if pkgname in pmb.helpers.other.cache["built"][arch]: - logging.verbose(pkgname + ": already checked this session," - " no need to build it or its dependencies") - return True - - logging.verbose(f"{pkgname}: marking as already built") - pmb.helpers.other.cache["built"][arch].append(pkgname) - return False - - -def get_apkbuild(args, pkgname, arch): - """ - Parse the APKBUILD path for pkgname. When there is none, try to find it in - the binary package APKINDEX files or raise an exception. - - :param pkgname: package name to be built, as specified in the APKBUILD - :returns: None or parsed APKBUILD - """ - # Get existing binary package indexes - pmb.helpers.repo.update(args, arch) - - # Get pmaport, skip upstream only packages - pmaport = pmb.helpers.pmaports.get(args, pkgname, False) - if pmaport: - return pmaport - if pmb.parse.apkindex.providers(args, pkgname, arch, False): - return None - raise RuntimeError("Package '" + pkgname + "': Could not find aport, and" - " could not find this package in any APKINDEX!") - - -def check_build_for_arch(args, pkgname, arch): - """ - Check if pmaport can be built or exists as binary for a specific arch. - :returns: * True when it can be built - * False when it can't be built, but exists in a binary repo - (e.g. temp/mesa can't be built for x86_64, but Alpine has it) - :raises: RuntimeError if the package can't be built for the given arch and - does not exist as binary package. - """ - # Check for pmaport with arch - if pmb.helpers.package.check_arch(args, pkgname, arch, False): - return True - - # Check for binary package - binary = pmb.parse.apkindex.package(args, pkgname, arch, False) - if binary: - pmaport = pmb.helpers.pmaports.get(args, pkgname) - pmaport_version = pmaport["pkgver"] + "-r" + pmaport["pkgrel"] - logging.debug(pkgname + ": found pmaport (" + pmaport_version + ") and" - " binary package (" + binary["version"] + ", from" - " postmarketOS or Alpine), but pmaport can't be built" - " for " + arch + " -> using binary package") - return False - - # No binary package exists and can't build it - logging.info("NOTE: You can edit the 'arch=' line inside the APKBUILD") - if args.action == "build": - logging.info("NOTE: Alternatively, use --arch to build for another" - " architecture ('pmbootstrap build --arch=armhf " + - pkgname + "')") - raise RuntimeError("Can't build '" + pkgname + "' for architecture " + - arch) - - -def get_depends(args, apkbuild): - """ - Alpine's abuild always builds/installs the "depends" and "makedepends" - of a package before building it. We used to only care about "makedepends" - and it's still possible to ignore the depends with --ignore-depends. - - :returns: list of dependency pkgnames (eg. 
["sdl2", "sdl2_net"]) - """ - # Read makedepends and depends - ret = list(apkbuild["makedepends"]) - if "!check" not in apkbuild["options"]: - ret += apkbuild["checkdepends"] - if "ignore_depends" not in args or not args.ignore_depends: - ret += apkbuild["depends"] - ret = sorted(set(ret)) - - # Don't recurse forever when a package depends on itself (#948) - for pkgname in ([apkbuild["pkgname"]] + - list(apkbuild["subpackages"].keys())): - if pkgname in ret: - logging.verbose(apkbuild["pkgname"] + ": ignoring dependency on" - " itself: " + pkgname) - ret.remove(pkgname) - return ret - - -def build_depends(args, apkbuild, arch, strict): - """ - Get and build dependencies with verbose logging messages. - - :returns: (depends, depends_built) - """ - # Get dependencies - pkgname = apkbuild["pkgname"] - depends = get_depends(args, apkbuild) - logging.verbose(pkgname + ": build/install dependencies: " + - ", ".join(depends)) - - # --no-depends: check for binary packages - depends_built = [] - if "no_depends" in args and args.no_depends: - pmb.helpers.repo.update(args, arch) - for depend in depends: - # Ignore conflicting dependencies - if depend.startswith("!"): - continue - # Check if binary package is missing - if not pmb.parse.apkindex.package(args, depend, arch, False): - raise RuntimeError("Missing binary package for dependency '" + - depend + "' of '" + pkgname + "', but" - " pmbootstrap won't build any depends since" - " it was started with --no-depends.") - # Check if binary package is outdated - apkbuild_dep = get_apkbuild(args, depend, arch) - if apkbuild_dep and \ - pmb.build.is_necessary(args, arch, apkbuild_dep): - raise RuntimeError(f"Binary package for dependency '{depend}'" - f" of '{pkgname}' is outdated, but" - f" pmbootstrap won't build any depends" - f" since it was started with --no-depends.") - else: - # Build the dependencies - for depend in depends: - if depend.startswith("!"): - continue - if package(args, depend, arch, strict=strict): - depends_built += [depend] - logging.verbose(pkgname + ": build dependencies: done, built: " + - ", ".join(depends_built)) - - return (depends, depends_built) - - -def is_necessary_warn_depends(args, apkbuild, arch, force, depends_built): - """ - Check if a build is necessary, and warn if it is not, but there were - dependencies built. - - :returns: True or False - """ - pkgname = apkbuild["pkgname"] - - # Check if necessary (this warns about binary version > aport version, so - # call it even in force mode) - ret = pmb.build.is_necessary(args, arch, apkbuild) - if force: - ret = True - - if not ret and len(depends_built): - logging.verbose(f"{pkgname}: depends on rebuilt package(s): " - f" {', '.join(depends_built)}") - - logging.verbose(pkgname + ": build necessary: " + str(ret)) - return ret - - -def init_buildenv(args, apkbuild, arch, strict=False, force=False, cross=None, - suffix="native", skip_init_buildenv=False, src=None): - """ - Build all dependencies, check if we need to build at all (otherwise we've - just initialized the build environment for nothing) and then setup the - whole build environment (abuild, gcc, dependencies, cross-compiler). - - :param cross: None, "native", or "crossdirect" - :param skip_init_buildenv: can be set to False to avoid initializing the - build environment. Use this when building - something during initialization of the build - environment (e.g. 
qemu aarch64 bug workaround) - :param src: override source used to build the package with a local folder - :returns: True when the build is necessary (otherwise False) - """ - - depends_arch = arch - if cross == "native": - depends_arch = pmb.config.arch_native - - # Build dependencies - depends, built = build_depends(args, apkbuild, depends_arch, strict) - - # Check if build is necessary - if not is_necessary_warn_depends(args, apkbuild, arch, force, built): - return False - - # Install and configure abuild, ccache, gcc, dependencies - if not skip_init_buildenv: - pmb.build.init(args, suffix) - pmb.build.other.configure_abuild(args, suffix) - if args.ccache: - pmb.build.other.configure_ccache(args, suffix) - if "rust" in depends or "cargo" in depends: - pmb.chroot.apk.install(args, ["sccache"], suffix) - if not strict and "pmb:strict" not in apkbuild["options"] and len(depends): - pmb.chroot.apk.install(args, depends, suffix) - if src: - pmb.chroot.apk.install(args, ["rsync"], suffix) - - # Cross-compiler init - if cross: - pmb.build.init_compiler(args, depends, cross, arch) - if cross == "crossdirect": - pmb.chroot.mount_native_into_foreign(args, suffix) - - return True - - -def get_pkgver(original_pkgver, original_source=False, now=None): - """ - Get the original pkgver when using the original source. Otherwise, get the - pkgver with an appended suffix of current date and time. For example: - _p20180218550502 - When appending the suffix, an existing suffix (e.g. _git20171231) gets - replaced. - - :param original_pkgver: unmodified pkgver from the package's APKBUILD. - :param original_source: the original source is used instead of overriding - it with --src. - :param now: use a specific date instead of current date (for test cases) - """ - if original_source: - return original_pkgver - - # Append current date - no_suffix = original_pkgver.split("_", 1)[0] - now = now if now else datetime.datetime.now() - new_suffix = "_p" + now.strftime("%Y%m%d%H%M%S") - return no_suffix + new_suffix - - -def override_source(args, apkbuild, pkgver, src, suffix="native"): - """ - Mount local source inside chroot and append new functions (prepare() etc.) - to the APKBUILD to make it use the local source. - """ - if not src: - return - - # Mount source in chroot - mount_path = "/mnt/pmbootstrap/source-override/" - mount_path_outside = args.work + "/chroot_" + suffix + mount_path - pmb.helpers.mount.bind(args, src, mount_path_outside, umount=True) - - # Delete existing append file - append_path = "/tmp/APKBUILD.append" - append_path_outside = args.work + "/chroot_" + suffix + append_path - if os.path.exists(append_path_outside): - pmb.chroot.root(args, ["rm", append_path], suffix) - - # Add src path to pkgdesc, cut it off after max length - pkgdesc = ("[" + src + "] " + apkbuild["pkgdesc"])[:127] - - # Appended content - append = """ - # ** Overrides below appended by pmbootstrap for --src ** - - pkgver=\"""" + pkgver + """\" - pkgdesc=\"""" + pkgdesc + """\" - _pmb_src_copy="/tmp/pmbootstrap-local-source-copy" - - # Empty $source avoids patching in prepare() - _pmb_source_original="$source" - source="" - sha512sums="" - - fetch() { - # Update source copy - msg "Copying source from host system: """ + src + """\" - rsync -a --exclude=".git/" --delete --ignore-errors --force \\ - \"""" + mount_path + """\" "$_pmb_src_copy" || true - - # Link local source files (e.g. 
kernel config) - mkdir "$srcdir" - local s - for s in $_pmb_source_original; do - is_remote "$s" || ln -sf "$startdir/$s" "$srcdir/" - done - } - - unpack() { - ln -sv "$_pmb_src_copy" "$builddir" - } - """ - - # Write and log append file - with open(append_path_outside, "w", encoding="utf-8") as handle: - for line in append.split("\n"): - handle.write(line[13:].replace(" " * 4, "\t") + "\n") - pmb.chroot.user(args, ["cat", append_path], suffix) - - # Append it to the APKBUILD - apkbuild_path = "/home/pmos/build/APKBUILD" - shell_cmd = ("cat " + apkbuild_path + " " + append_path + " > " + - append_path + "_") - pmb.chroot.user(args, ["sh", "-c", shell_cmd], suffix) - pmb.chroot.user(args, ["mv", append_path + "_", apkbuild_path], suffix) - - -def mount_pmaports(args, destination, suffix="native"): - """ - Mount pmaports.git in chroot. - - :param destination: mount point inside the chroot - """ - outside_destination = args.work + "/chroot_" + suffix + destination - pmb.helpers.mount.bind(args, args.aports, outside_destination, umount=True) - - -def link_to_git_dir(args, suffix): - """ - Make /home/pmos/build/.git point to the .git dir from pmaports.git, with a - symlink so abuild does not fail (#1841). - - abuild expects the current working directory to be a subdirectory of a - cloned git repository (e.g. main/openrc from aports.git). If git is - installed, it will try to get the last git commit from that repository, and - place it in the resulting apk (.PKGINFO) as well as use the date from that - commit as SOURCE_DATE_EPOCH (for reproducible builds). - - With that symlink, we actually make it use the last git commit from - pmaports.git for SOURCE_DATE_EPOCH and have that in the resulting apk's - .PKGINFO. - """ - # Mount pmaports.git in chroot, in case the user did not use pmbootstrap to - # clone it (e.g. how we build on sourcehut). Do this here and not at the - # initialization of the chroot, because the pmaports dir may not exist yet - # at that point. Use umount=True, so we don't have an old path mounted - # (some tests change the pmaports dir). - destination = "/mnt/pmaports" - mount_pmaports(args, destination, suffix) - - # Create .git symlink - pmb.chroot.user(args, ["mkdir", "-p", "/home/pmos/build"], suffix) - pmb.chroot.user(args, ["ln", "-sf", destination + "/.git", - "/home/pmos/build/.git"], suffix) - - -def run_abuild(args, apkbuild, arch, strict=False, force=False, cross=None, - suffix="native", src=None): - """ - Set up all environment variables and construct the abuild command (all - depending on the cross-compiler method and target architecture), copy - the aport to the chroot and execute abuild. - - :param cross: None, "native", or "crossdirect" - :param src: override source used to build the package with a local folder - :returns: (output, cmd, env), output is the destination apk path relative - to the package folder ("x86_64/hello-1-r2.apk"). cmd and env are - used by the test case, and they are the full abuild command and - the environment variables dict generated in this function. - """ - # Sanity check - if cross == "native" and "!tracedeps" not in apkbuild["options"]: - logging.info("WARNING: Option !tracedeps is not set, but we're" - " cross-compiling in the native chroot. 
This will" - " probably fail!") - - # Pretty log message - pkgver = get_pkgver(apkbuild["pkgver"], src is None) - output = (arch + "/" + apkbuild["pkgname"] + "-" + pkgver + - "-r" + apkbuild["pkgrel"] + ".apk") - message = "(" + suffix + ") build " + output - if src: - message += " (source: " + src + ")" - logging.info(message) - - # Environment variables - env = {"CARCH": arch, - "SUDO_APK": "abuild-apk --no-progress"} - if cross == "native": - hostspec = pmb.parse.arch.alpine_to_hostspec(arch) - env["CROSS_COMPILE"] = hostspec + "-" - env["CC"] = hostspec + "-gcc" - if cross == "crossdirect": - env["PATH"] = ":".join(["/native/usr/lib/crossdirect/" + arch, - pmb.config.chroot_path]) - if not args.ccache: - env["CCACHE_DISABLE"] = "1" - - # Use sccache without crossdirect (crossdirect uses it via rustc.sh) - if args.ccache and cross != "crossdirect": - env["RUSTC_WRAPPER"] = "/usr/bin/sccache" - - # Cache binary objects from go in this path (like ccache) - env["GOCACHE"] = "/home/pmos/.cache/go-build" - - # Cache go modules (git repositories). Usually these should be bundled and - # it should not be required to download them at build time, in that case - # the APKBUILD sets the GOPATH (and therefore indirectly GOMODCACHE). But - # e.g. when using --src they are not bundled, in that case it makes sense - # to point GOMODCACHE at pmbootstrap's work dir so the modules are only - # downloaded once. - if args.go_mod_cache: - env["GOMODCACHE"] = "/home/pmos/go/pkg/mod" - - # Build the abuild command - cmd = ["abuild", "-D", "postmarketOS"] - if strict or "pmb:strict" in apkbuild["options"]: - if not strict: - logging.debug(apkbuild["pkgname"] + ": 'pmb:strict' found in" - " options, building in strict mode") - cmd += ["-r"] # install depends with abuild - else: - cmd += ["-d"] # do not install depends with abuild - if force: - cmd += ["-f"] - - # Copy the aport to the chroot and build it - pmb.build.copy_to_buildpath(args, apkbuild["pkgname"], suffix) - override_source(args, apkbuild, pkgver, src, suffix) - link_to_git_dir(args, suffix) - pmb.chroot.user(args, cmd, suffix, "/home/pmos/build", env=env) - return (output, cmd, env) - - -def finish(args, apkbuild, arch, output, strict=False, suffix="native"): - """ - Various finishing tasks that need to be done after a build. - """ - # Verify output file - channel = pmb.config.pmaports.read_config(args)["channel"] - path = f"{args.work}/packages/{channel}/{output}" - if not os.path.exists(path): - raise RuntimeError("Package not found after build: " + path) - - # Clear APKINDEX cache (we only parse APKINDEX files once per session and - # cache the result for faster dependency resolving, but after we built a - # package we need to parse it again) - pmb.parse.apkindex.clear_cache(f"{args.work}/packages/{channel}" - f"/{arch}/APKINDEX.tar.gz") - - # Uninstall build dependencies (strict mode) - if strict or "pmb:strict" in apkbuild["options"]: - logging.info("(" + suffix + ") uninstall build dependencies") - pmb.chroot.user(args, ["abuild", "undeps"], suffix, "/home/pmos/build", - env={"SUDO_APK": "abuild-apk --no-progress"}) - # If the build depends contain postmarketos-keys or postmarketos-base, - # abuild will have removed the postmarketOS repository key (pma#1230) - pmb.chroot.init_keys(args) - - -def package(args, pkgname, arch=None, force=False, strict=False, - skip_init_buildenv=False, src=None): - """ - Build a package and its dependencies with Alpine Linux' abuild. 
- - If this function is called multiple times on the same pkgname but first - with force=False and then force=True the force argument will be ignored due - to the package cache. - See the skip_already_built() call below. - - :param pkgname: package name to be built, as specified in the APKBUILD - :param arch: architecture we're building for (default: native) - :param force: always build, even if not necessary - :param strict: avoid building with irrelevant dependencies installed by - letting abuild install and uninstall all dependencies. - :param skip_init_buildenv: can be set to False to avoid initializing the - build environment. Use this when building - something during initialization of the build - environment (e.g. qemu aarch64 bug workaround) - :param src: override source used to build the package with a local folder - :returns: None if the build was not necessary - output path relative to the packages folder ("armhf/ab-1-r2.apk") - """ - logging.verbose(f"{pkgname}: running pmb.build._package.package") - - # Once per session is enough - arch = arch or pmb.config.arch_native - if skip_already_built(pkgname, arch): - return - - # Only build when APKBUILD exists - apkbuild = get_apkbuild(args, pkgname, arch) - if not apkbuild: - return - - # Detect the build environment (skip unnecessary builds) - if not check_build_for_arch(args, pkgname, arch): - return - suffix = pmb.build.autodetect.suffix(apkbuild, arch) - cross = pmb.build.autodetect.crosscompile(args, apkbuild, arch, suffix) - if not init_buildenv(args, apkbuild, arch, strict, force, cross, suffix, - skip_init_buildenv, src): - return - - # Build and finish up - (output, cmd, env) = run_abuild(args, apkbuild, arch, strict, force, cross, - suffix, src) - finish(args, apkbuild, arch, output, strict, suffix) - return output diff --git a/pmb/build/autodetect.py b/pmb/build/autodetect.py deleted file mode 100644 index 953f0062..00000000 --- a/pmb/build/autodetect.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os - -import pmb.config -import pmb.chroot.apk -import pmb.helpers.pmaports -import pmb.parse.arch - - -def arch_from_deviceinfo(args, pkgname, aport): - """ - The device- packages are noarch packages. But it only makes sense to build - them for the device's architecture, which is specified in the deviceinfo - file. - - :returns: None (no deviceinfo file) - arch from the deviceinfo (e.g. "armhf") - """ - # Require a deviceinfo file in the aport - if not pkgname.startswith("device-"): - return - deviceinfo = aport + "/deviceinfo" - if not os.path.exists(deviceinfo): - return - - # Return its arch - device = pkgname.split("-", 1)[1] - arch = pmb.parse.deviceinfo(args, device)["arch"] - logging.verbose(pkgname + ": arch from deviceinfo: " + arch) - return arch - - -def arch(args, pkgname): - """ - Find a good default in case the user did not specify for which architecture - a package should be built. - - :returns: arch string like "x86_64" or "armhf". 
Preferred order, depending - on what is supported by the APKBUILD: - * native arch - * device arch (this will be preferred instead if build_default_device_arch is true) - * first arch in the APKBUILD - """ - aport = pmb.helpers.pmaports.find(args, pkgname) - ret = arch_from_deviceinfo(args, pkgname, aport) - if ret: - return ret - - apkbuild = pmb.parse.apkbuild(f"{aport}/APKBUILD") - arches = apkbuild["arch"] - - if args.build_default_device_arch: - preferred_arch = args.deviceinfo["arch"] - preferred_arch_2nd = pmb.config.arch_native - else: - preferred_arch = pmb.config.arch_native - preferred_arch_2nd = args.deviceinfo["arch"] - - if "noarch" in arches or "all" in arches or preferred_arch in arches: - return preferred_arch - - if preferred_arch_2nd in arches: - return preferred_arch_2nd - - try: - return apkbuild["arch"][0] - except IndexError: - return None - - -def suffix(apkbuild, arch): - if arch == pmb.config.arch_native: - return "native" - - if "pmb:cross-native" in apkbuild["options"]: - return "native" - - return "buildroot_" + arch - - -def crosscompile(args, apkbuild, arch, suffix): - """ - :returns: None, "native", "crossdirect" - """ - if not args.cross: - return None - if not pmb.parse.arch.cpu_emulation_required(arch): - return None - if suffix == "native": - return "native" - if "!pmb:crossdirect" in apkbuild["options"]: - return None - return "crossdirect" diff --git a/pmb/build/checksum.py b/pmb/build/checksum.py deleted file mode 100644 index 921ed676..00000000 --- a/pmb/build/checksum.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging - -import pmb.chroot -import pmb.build -import pmb.helpers.run -import pmb.helpers.pmaports - - -def update(args, pkgname): - """ Fetch all sources and update the checksums in the APKBUILD. """ - pmb.build.init_abuild_minimal(args) - pmb.build.copy_to_buildpath(args, pkgname) - logging.info("(native) generate checksums for " + pkgname) - pmb.chroot.user(args, ["abuild", "checksum"], - working_dir="/home/pmos/build") - - # Copy modified APKBUILD back - source = args.work + "/chroot_native/home/pmos/build/APKBUILD" - target = pmb.helpers.pmaports.find(args, pkgname) + "/" - pmb.helpers.run.user(args, ["cp", source, target]) - - -def verify(args, pkgname): - """ Fetch all sources and verify their checksums. """ - pmb.build.init_abuild_minimal(args) - pmb.build.copy_to_buildpath(args, pkgname) - logging.info("(native) verify checksums for " + pkgname) - - # Fetch and verify sources, "fetch" alone does not verify them: - # https://github.com/alpinelinux/abuild/pull/86 - pmb.chroot.user(args, ["abuild", "fetch", "verify"], - working_dir="/home/pmos/build") diff --git a/pmb/build/envkernel.py b/pmb/build/envkernel.py deleted file mode 100644 index b04fcbba..00000000 --- a/pmb/build/envkernel.py +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright 2023 Robert Yang -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import re - -import pmb.aportgen -import pmb.build -import pmb.chroot -import pmb.helpers -import pmb.helpers.pmaports -import pmb.parse - - -def match_kbuild_out(word): - """ - Look for paths in the following formats: - "//arch//boot" - "//include/config/kernel.release" - - :param word: space separated string cut out from a line from an APKBUILD - function body that might be the kbuild output path - :returns: kernel build output directory. - empty string when a separate build output directory isn't used. 
- None, when no output directory is found. - """ - prefix = "^\\\"?\\$({?builddir}?|{?srcdir}?)\\\"?/" - kbuild_out = "(.*\\/)*" - - postfix = "(arch\\/.*\\/boot.*)\\\"?$" - match = re.match(prefix + kbuild_out + postfix, word) - - if match is None: - postfix = "(include\\/config\\/kernel\\.release)\\\"?$" - match = re.match(prefix + kbuild_out + postfix, word) - - if match is None: - return None - - groups = match.groups() - if groups is None or len(groups) != 3: - return None - - logging.debug("word = " + str(word)) - logging.debug("regex match groups = " + str(groups)) - out_dir = groups[1] - return "" if out_dir is None else out_dir.strip("/") - - -def find_kbuild_output_dir(function_body): - """ - Guess what the kernel build output directory is. Parses each line of the - function word by word, looking for paths which contain the kbuild output - directory. - - :param function_body: contents of a function from the kernel APKBUILD - :returns: kbuild output dir - None, when output dir is not found - """ - - guesses = [] - for line in function_body: - for item in line.split(): - kbuild_out = match_kbuild_out(item) - if kbuild_out is not None: - guesses.append(kbuild_out) - break - - # Check if guesses are all the same - it = iter(guesses) - first = next(it, None) - if first is None: - raise RuntimeError("Couldn't find a kbuild out directory. Is your " - "APKBUILD messed up? If not, then consider " - "adjusting the patterns in pmb/build/envkernel.py " - "to work with your APKBUILD, or submit an issue.") - if all(first == rest for rest in it): - return first - raise RuntimeError("Multiple kbuild out directories found. Can you modify " - "your APKBUILD so it only has one output path? If you " - "can't resolve it, please open an issue.") - - -def modify_apkbuild(args, pkgname, aport): - """ - Modify kernel APKBUILD to package build output from envkernel.sh - """ - apkbuild_path = aport + "/APKBUILD" - apkbuild = pmb.parse.apkbuild(apkbuild_path) - if os.path.exists(args.work + "/aportgen"): - pmb.helpers.run.user(args, ["rm", "-r", args.work + "/aportgen"]) - - pmb.helpers.run.user(args, ["mkdir", args.work + "/aportgen"]) - pmb.helpers.run.user(args, ["cp", "-r", apkbuild_path, - args.work + "/aportgen"]) - - pkgver = pmb.build._package.get_pkgver(apkbuild["pkgver"], - original_source=False) - fields = {"pkgver": pkgver, - "pkgrel": "0", - "subpackages": "", - "builddir": "/home/pmos/build/src"} - - pmb.aportgen.core.rewrite(args, pkgname, apkbuild_path, fields=fields) - - -def host_build_bindmount(args, chroot, flag_file, mount=False): - """ - Check if the bind mount already exists and unmount it. - Then bindmount the current directory into the chroot as - /mnt/linux so it can be used by the envkernel abuild wrapper - """ - flag_path = f"{chroot}/{flag_file}" - if os.path.exists(flag_path): - logging.info("Cleaning up kernel sources bind-mount") - pmb.helpers.run.root(args, ["umount", chroot + "/mnt/linux"], check=False) - pmb.helpers.run.root(args, ["rm", flag_path]) - - if mount: - pmb.helpers.mount.bind(args, ".", f"{chroot}/mnt/linux") - pmb.helpers.run.root(args, ["touch", flag_path]) - - -def run_abuild(args, pkgname, arch, apkbuild_path, kbuild_out): - """ - Prepare build environment and run abuild. 
- - :param pkgname: package name of a linux kernel aport - :param arch: architecture for the kernel - :param apkbuild_path: path to APKBUILD of the kernel aport - :param kbuild_out: kernel build system output sub-directory - """ - chroot = args.work + "/chroot_native" - build_path = "/home/pmos/build" - kbuild_out_source = "/mnt/linux/.output" - - # If the kernel was cross-compiled on the host rather than with the envkernel - # helper, we can still use the envkernel logic to package the artifacts for - # development, making it easy to quickly sideload a new kernel or pmbootstrap - # to create a boot image - # This handles bind mounting the current directory (assumed to be kernel sources) - # into the chroot so we can run abuild against it for the currently selected - # devices kernel package. - flag_file = "envkernel-bind-mounted" - host_build = False - - if not pmb.helpers.mount.ismount(chroot + "/mnt/linux"): - logging.info("envkernel.sh hasn't run, assuming the kernel was cross compiled" - "on host and using current dir as source") - host_build = True - - host_build_bindmount(args, chroot, flag_file, mount=host_build) - - if not os.path.exists(chroot + kbuild_out_source): - raise RuntimeError("No '.output' dir found in your kernel source dir. " - "Compile the " + args.device + " kernel first and " - "then try again. See https://postmarketos.org/envkernel" - "for details. If building on your host and only using " - "--envkernel for packaging, make sure you have O=.output " - "as an argument to make.") - - # Create working directory for abuild - pmb.build.copy_to_buildpath(args, pkgname) - - # Create symlink from abuild working directory to envkernel build directory - build_output = "" if kbuild_out == "" else "/" + kbuild_out - if build_output != "": - if os.path.islink(chroot + "/mnt/linux/" + build_output) and \ - os.path.lexists(chroot + "/mnt/linux/" + build_output): - pmb.chroot.root(args, ["rm", "/mnt/linux/" + build_output]) - pmb.chroot.root(args, ["ln", "-s", "/mnt/linux", - build_path + "/src"]) - pmb.chroot.root(args, ["ln", "-s", kbuild_out_source, - build_path + "/src" + build_output]) - - cmd = ["cp", apkbuild_path, chroot + build_path + "/APKBUILD"] - pmb.helpers.run.root(args, cmd) - - # Create the apk package - env = {"CARCH": arch, - "CHOST": arch, - "CBUILD": pmb.config.arch_native, - "SUDO_APK": "abuild-apk --no-progress"} - cmd = ["abuild", "rootpkg"] - pmb.chroot.user(args, cmd, working_dir=build_path, env=env) - - # Clean up bindmount if needed - host_build_bindmount(args, chroot, flag_file) - - # Clean up symlinks - if build_output != "": - if os.path.islink(chroot + "/mnt/linux/" + build_output) and \ - os.path.lexists(chroot + "/mnt/linux/" + build_output): - pmb.chroot.root(args, ["rm", "/mnt/linux/" + build_output]) - pmb.chroot.root(args, ["rm", build_path + "/src"]) - - -def package_kernel(args): - """ - Frontend for 'pmbootstrap build --envkernel': creates a package from - envkernel output. 
- """ - pkgname = args.packages[0] - if len(args.packages) > 1 or not pkgname.startswith("linux-"): - raise RuntimeError("--envkernel needs exactly one linux-* package as " - "argument.") - - aport = pmb.helpers.pmaports.find(args, pkgname) - - modify_apkbuild(args, pkgname, aport) - apkbuild_path = args.work + "/aportgen/APKBUILD" - - arch = args.deviceinfo["arch"] - apkbuild = pmb.parse.apkbuild(apkbuild_path, check_pkgname=False) - if apkbuild["_outdir"]: - kbuild_out = apkbuild["_outdir"] - else: - function_body = pmb.parse.function_body(aport + "/APKBUILD", "package") - kbuild_out = find_kbuild_output_dir(function_body) - suffix = pmb.build.autodetect.suffix(apkbuild, arch) - - # Install package dependencies - depends, _ = pmb.build._package.build_depends( - args, apkbuild, pmb.config.arch_native, strict=False) - pmb.build.init(args, suffix) - if pmb.parse.arch.cpu_emulation_required(arch): - depends.append("binutils-" + arch) - pmb.chroot.apk.install(args, depends, suffix) - - output = (arch + "/" + apkbuild["pkgname"] + "-" + apkbuild["pkgver"] + - "-r" + apkbuild["pkgrel"] + ".apk") - message = "(" + suffix + ") build " + output - logging.info(message) - - run_abuild(args, pkgname, arch, apkbuild_path, kbuild_out) - pmb.build.other.index_repo(args, arch) diff --git a/pmb/build/init.py b/pmb/build/init.py deleted file mode 100644 index 5de698bb..00000000 --- a/pmb/build/init.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import logging -import os -import pathlib - -import pmb.build -import pmb.config -import pmb.chroot -import pmb.chroot.apk -import pmb.helpers.run - - -def init_abuild_minimal(args, suffix="native"): - """ Initialize a minimal chroot with abuild where one can do - 'abuild checksum'. """ - marker = f"{args.work}/chroot_{suffix}/tmp/pmb_chroot_abuild_init_done" - if os.path.exists(marker): - return - - pmb.chroot.apk.install(args, ["abuild"], suffix, build=False) - - # Fix permissions - pmb.chroot.root(args, ["chown", "root:abuild", - "/var/cache/distfiles"], suffix) - pmb.chroot.root(args, ["chmod", "g+w", - "/var/cache/distfiles"], suffix) - - # Add user to group abuild - pmb.chroot.root(args, ["adduser", "pmos", "abuild"], suffix) - - pathlib.Path(marker).touch() - - -def init(args, suffix="native"): - """ Initialize a chroot for building packages with abuild. """ - marker = f"{args.work}/chroot_{suffix}/tmp/pmb_chroot_build_init_done" - if os.path.exists(marker): - return - - init_abuild_minimal(args, suffix) - - # Initialize chroot, install packages - pmb.chroot.apk.install(args, pmb.config.build_packages, suffix, - build=False) - - # Generate package signing keys - chroot = args.work + "/chroot_" + suffix - if not os.path.exists(args.work + "/config_abuild/abuild.conf"): - logging.info("(" + suffix + ") generate abuild keys") - pmb.chroot.user(args, ["abuild-keygen", "-n", "-q", "-a"], - suffix, env={"PACKAGER": "pmos "}) - - # Copy package signing key to /etc/apk/keys - for key in glob.glob(chroot + - "/mnt/pmbootstrap/abuild-config/*.pub"): - key = key[len(chroot):] - pmb.chroot.root(args, ["cp", key, "/etc/apk/keys/"], suffix) - - # Add gzip wrapper that converts '-9' to '-1' - if not os.path.exists(chroot + "/usr/local/bin/gzip"): - with open(chroot + "/tmp/gzip_wrapper.sh", "w") as handle: - content = """ - #!/bin/sh - # Simple wrapper that converts -9 flag for gzip to -1 for - # speed improvement with abuild. FIXME: upstream to abuild - # with a flag! 
- args="" - for arg in "$@"; do - [ "$arg" == "-9" ] && arg="-1" - args="$args $arg" - done - /bin/gzip $args - """ - lines = content.split("\n")[1:] - for i in range(len(lines)): - lines[i] = lines[i][16:] - handle.write("\n".join(lines)) - pmb.chroot.root(args, ["cp", "/tmp/gzip_wrapper.sh", - "/usr/local/bin/gzip"], suffix) - pmb.chroot.root(args, ["chmod", "+x", "/usr/local/bin/gzip"], suffix) - - # abuild.conf: Don't clean the build folder after building, so we can - # inspect it afterwards for debugging - pmb.chroot.root(args, ["sed", "-i", "-e", "s/^CLEANUP=.*/CLEANUP=''/", - "/etc/abuild.conf"], suffix) - - # abuild.conf: Don't clean up installed packages in strict mode, so - # abuild exits directly when pressing ^C in pmbootstrap. - pmb.chroot.root(args, ["sed", "-i", "-e", - "s/^ERROR_CLEANUP=.*/ERROR_CLEANUP=''/", - "/etc/abuild.conf"], suffix) - - pathlib.Path(marker).touch() - - -def init_compiler(args, depends, cross, arch): - cross_pkgs = ["ccache-cross-symlinks"] - if "gcc4" in depends: - cross_pkgs += ["gcc4-" + arch] - elif "gcc6" in depends: - cross_pkgs += ["gcc6-" + arch] - else: - cross_pkgs += ["gcc-" + arch, "g++-" + arch] - if "clang" in depends or "clang-dev" in depends: - cross_pkgs += ["clang"] - if cross == "crossdirect": - cross_pkgs += ["crossdirect"] - if "rust" in depends or "cargo" in depends: - if args.ccache: - cross_pkgs += ["sccache"] - # crossdirect for rust installs all build dependencies in the - # native chroot too, as some of them can be required for building - # native macros / build scripts - cross_pkgs += depends - - pmb.chroot.apk.install(args, cross_pkgs) diff --git a/pmb/build/kconfig.py b/pmb/build/kconfig.py deleted file mode 100644 index 1640f30c..00000000 --- a/pmb/build/kconfig.py +++ /dev/null @@ -1,164 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging - -import pmb.build -import pmb.build.autodetect -import pmb.build.checksum -import pmb.chroot -import pmb.chroot.apk -import pmb.chroot.other -import pmb.helpers.pmaports -import pmb.helpers.run -import pmb.parse - - -def get_arch(apkbuild): - """ - Take the architecture from the APKBUILD or complain if it's ambiguous. This - function only gets called if --arch is not set. - - :param apkbuild: looks like: {"pkgname": "linux-...", - "arch": ["x86_64", "armhf", "aarch64"]} - or: {"pkgname": "linux-...", "arch": ["armhf"]} - """ - pkgname = apkbuild["pkgname"] - - # Disabled package (arch="") - if not apkbuild["arch"]: - raise RuntimeError(f"'{pkgname}' is disabled (arch=\"\"). Please use" - " '--arch' to specify the desired architecture.") - - # Multiple architectures - if len(apkbuild["arch"]) > 1: - raise RuntimeError(f"'{pkgname}' supports multiple architectures" - f" ({', '.join(apkbuild['arch'])}). Please use" - " '--arch' to specify the desired architecture.") - - return apkbuild["arch"][0] - - -def get_outputdir(args, pkgname, apkbuild): - """ - Get the folder for the kernel compilation output. - For most APKBUILDs, this is $builddir. But some older ones still use - $srcdir/build (see the discussion in #1551). - """ - # Old style ($srcdir/build) - ret = "/home/pmos/build/src/build" - chroot = args.work + "/chroot_native" - if os.path.exists(chroot + ret + "/.config"): - logging.warning("*****") - logging.warning("NOTE: The code in this linux APKBUILD is pretty old." 
- " Consider making a backup and migrating to a modern" - " version with: pmbootstrap aportgen " + pkgname) - logging.warning("*****") - - return ret - - # New style ($builddir) - cmd = "srcdir=/home/pmos/build/src source APKBUILD; echo $builddir" - ret = pmb.chroot.user(args, ["sh", "-c", cmd], - "native", "/home/pmos/build", - output_return=True).rstrip() - if os.path.exists(chroot + ret + "/.config"): - return ret - # Some Mediatek kernels use a 'kernel' subdirectory - if os.path.exists(chroot + ret + "/kernel/.config"): - return os.path.join(ret, "kernel") - - # Out-of-tree builds ($_outdir) - if os.path.exists(chroot + ret + "/" + apkbuild["_outdir"] + "/.config"): - return os.path.join(ret, apkbuild["_outdir"]) - - # Not found - raise RuntimeError("Could not find the kernel config. Consider making a" - " backup of your APKBUILD and recreating it from the" - " template with: pmbootstrap aportgen " + pkgname) - - -def extract_and_patch_sources(args, pkgname, arch): - pmb.build.copy_to_buildpath(args, pkgname) - logging.info("(native) extract kernel source") - pmb.chroot.user(args, ["abuild", "unpack"], "native", "/home/pmos/build") - logging.info("(native) apply patches") - pmb.chroot.user(args, ["abuild", "prepare"], "native", - "/home/pmos/build", output="interactive", - env={"CARCH": arch}) - - -def menuconfig(args, pkgname, use_oldconfig): - # Pkgname: allow omitting "linux-" prefix - if not pkgname.startswith("linux-"): - pkgname = "linux-" + pkgname - - # Read apkbuild - aport = pmb.helpers.pmaports.find(args, pkgname) - apkbuild = pmb.parse.apkbuild(f"{aport}/APKBUILD") - arch = args.arch or get_arch(apkbuild) - suffix = pmb.build.autodetect.suffix(apkbuild, arch) - cross = pmb.build.autodetect.crosscompile(args, apkbuild, arch, suffix) - hostspec = pmb.parse.arch.alpine_to_hostspec(arch) - - # Set up build tools and makedepends - pmb.build.init(args, suffix) - if cross: - pmb.build.init_compiler(args, [], cross, arch) - - depends = apkbuild["makedepends"] - copy_xauth = False - - if use_oldconfig: - kopt = "oldconfig" - else: - kopt = "menuconfig" - if args.xconfig: - depends += ["qt5-qtbase-dev", "font-noto"] - kopt = "xconfig" - copy_xauth = True - elif args.nconfig: - kopt = "nconfig" - depends += ["ncurses-dev"] - else: - depends += ["ncurses-dev"] - - pmb.chroot.apk.install(args, depends) - - # Copy host's .xauthority into native - if copy_xauth: - pmb.chroot.other.copy_xauthority(args) - - extract_and_patch_sources(args, pkgname, arch) - - # Check for background color variable - color = os.environ.get("MENUCONFIG_COLOR") - - # Run make menuconfig - outputdir = get_outputdir(args, pkgname, apkbuild) - logging.info("(native) make " + kopt) - env = {"ARCH": pmb.parse.arch.alpine_to_kernel(arch), - "DISPLAY": os.environ.get("DISPLAY"), - "XAUTHORITY": "/home/pmos/.Xauthority"} - if cross: - env["CROSS_COMPILE"] = f"{hostspec}-" - env["CC"] = f"{hostspec}-gcc" - if color: - env["MENUCONFIG_COLOR"] = color - pmb.chroot.user(args, ["make", kopt], "native", - outputdir, output="tui", env=env) - - # Find the updated config - source = args.work + "/chroot_native" + outputdir + "/.config" - if not os.path.exists(source): - raise RuntimeError("No kernel config generated: " + source) - - # Update the aport (config and checksum) - logging.info("Copy kernel config back to aport-folder") - config = "config-" + apkbuild["_flavor"] + "." 
+ arch - target = aport + "/" + config - pmb.helpers.run.user(args, ["cp", source, target]) - pmb.build.checksum.update(args, pkgname) - - # Check config - pmb.parse.kconfig.check(args, apkbuild["_flavor"], details=True) diff --git a/pmb/build/newapkbuild.py b/pmb/build/newapkbuild.py deleted file mode 100644 index 9ada1b53..00000000 --- a/pmb/build/newapkbuild.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import os -import logging -import pmb.chroot.user -import pmb.helpers.cli -import pmb.parse - - -def newapkbuild(args, folder, args_passed, force=False): - # Initialize build environment and build folder - pmb.build.init(args) - build = "/home/pmos/build" - build_outside = args.work + "/chroot_native" + build - if os.path.exists(build_outside): - pmb.chroot.root(args, ["rm", "-r", build]) - pmb.chroot.user(args, ["mkdir", "-p", build]) - - # Run newapkbuild - pmb.chroot.user(args, ["newapkbuild"] + args_passed, working_dir=build) - glob_result = glob.glob(build_outside + "/*/APKBUILD") - if not len(glob_result): - return - - # Paths for copying - source_apkbuild = glob_result[0] - pkgname = pmb.parse.apkbuild(source_apkbuild, False)["pkgname"] - target = args.aports + "/" + folder + "/" + pkgname - - # Move /home/pmos/build/$pkgname/* to /home/pmos/build/* - for path in glob.glob(build_outside + "/*/*"): - path_inside = build + "/" + pkgname + "/" + os.path.basename(path) - pmb.chroot.user(args, ["mv", path_inside, build]) - pmb.chroot.user(args, ["rmdir", build + "/" + pkgname]) - - # Overwrite confirmation - if os.path.exists(target): - logging.warning("WARNING: Folder already exists: " + target) - question = "Continue and delete its contents?" - if not force and not pmb.helpers.cli.confirm(args, question): - raise RuntimeError("Aborted.") - pmb.helpers.run.user(args, ["rm", "-r", target]) - - # Copy the aport (without the extracted src folder) - logging.info("Create " + target) - pmb.helpers.run.user(args, ["mkdir", "-p", target]) - for path in glob.glob(build_outside + "/*"): - if not os.path.isdir(path): - pmb.helpers.run.user(args, ["cp", path, target]) diff --git a/pmb/build/other.py b/pmb/build/other.py deleted file mode 100644 index 1c6bafae..00000000 --- a/pmb/build/other.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import logging -import os -import shlex -import datetime - -import pmb.chroot -import pmb.helpers.file -import pmb.helpers.git -import pmb.helpers.pmaports -import pmb.helpers.run -import pmb.parse.apkindex -import pmb.parse.version - - -def copy_to_buildpath(args, package, suffix="native"): - # Sanity check - aport = pmb.helpers.pmaports.find(args, package) - if not os.path.exists(aport + "/APKBUILD"): - raise ValueError("Path does not contain an APKBUILD file:" + - aport) - - # Clean up folder - build = args.work + "/chroot_" + suffix + "/home/pmos/build" - if os.path.exists(build): - pmb.chroot.root(args, ["rm", "-rf", "/home/pmos/build"], suffix) - - # Copy aport contents with resolved symlinks - pmb.helpers.run.root(args, ["mkdir", "-p", build]) - for entry in os.listdir(aport): - # Don't copy those dirs, as those have probably been generated by running `abuild` - # on the host system directly and not cleaning up after itself. - # Those dirs might contain broken symlinks and cp fails resolving them. 
- if entry in ["src", "pkg"]: - logging.warn(f"WARNING: Not copying {entry}, looks like a leftover from abuild") - continue - pmb.helpers.run.root(args, ["cp", "-rL", f"{aport}/{entry}", f"{build}/{entry}"]) - - pmb.chroot.root(args, ["chown", "-R", "pmos:pmos", - "/home/pmos/build"], suffix) - - -def is_necessary(args, arch, apkbuild, indexes=None): - """ - Check if the package has already been built. Compared to abuild's check, - this check also works for different architectures. - - :param arch: package target architecture - :param apkbuild: from pmb.parse.apkbuild() - :param indexes: list of APKINDEX.tar.gz paths - :returns: boolean - """ - # Get package name, version, define start of debug message - package = apkbuild["pkgname"] - version_new = apkbuild["pkgver"] + "-r" + apkbuild["pkgrel"] - msg = "Build is necessary for package '" + package + "': " - - # Get old version from APKINDEX - index_data = pmb.parse.apkindex.package(args, package, arch, False, - indexes) - if not index_data: - logging.debug(msg + "No binary package available") - return True - - # Can't build pmaport for arch: use Alpine's package (#1897) - if arch and not pmb.helpers.pmaports.check_arches(apkbuild["arch"], arch): - logging.verbose(f"{package}: build is not necessary, because pmaport" - " can't be built for {arch}. Using Alpine's binary" - " package.") - return False - - # a) Binary repo has a newer version - version_old = index_data["version"] - if pmb.parse.version.compare(version_old, version_new) == 1: - logging.warning("WARNING: package {}: aport version {} is lower than" - " {} from the binary repository. {} will be used when" - " installing {}. See also:" - " " - "".format(package, version_new, version_old, - version_old, package)) - return False - - # b) Aports folder has a newer version - if version_new != version_old: - logging.debug(f"{msg}Binary package out of date (binary: " - f"{version_old}, aport: {version_new})") - return True - - # Aports and binary repo have the same version. - return False - - -def index_repo(args, arch=None): - """ - Recreate the APKINDEX.tar.gz for a specific repo, and clear the parsing - cache for that file for the current pmbootstrap session (to prevent - rebuilding packages twice, in case the rebuild takes less than a second). - - :param arch: when not defined, re-index all repos - """ - pmb.build.init(args) - - channel = pmb.config.pmaports.read_config(args)["channel"] - if arch: - paths = [f"{args.work}/packages/{channel}/{arch}"] - else: - paths = glob.glob(f"{args.work}/packages/{channel}/*") - - for path in paths: - if os.path.isdir(path): - path_arch = os.path.basename(path) - path_repo_chroot = "/home/pmos/packages/pmos/" + path_arch - logging.debug("(native) index " + path_arch + " repository") - description = str(datetime.datetime.now()) - commands = [ - # Wrap the index command with sh so we can use '*.apk' - ["sh", "-c", "apk -q index --output APKINDEX.tar.gz_" - " --description " + shlex.quote(description) + "" - " --rewrite-arch " + shlex.quote(path_arch) + " *.apk"], - ["abuild-sign", "APKINDEX.tar.gz_"], - ["mv", "APKINDEX.tar.gz_", "APKINDEX.tar.gz"] - ] - for command in commands: - pmb.chroot.user(args, command, working_dir=path_repo_chroot) - else: - logging.debug("NOTE: Can't build index for: " + path) - pmb.parse.apkindex.clear_cache(f"{path}/APKINDEX.tar.gz") - - -def configure_abuild(args, suffix, verify=False): - """ - Set the correct JOBS count in abuild.conf - - :param verify: internally used to test if changing the config has worked. 
- """ - path = args.work + "/chroot_" + suffix + "/etc/abuild.conf" - prefix = "export JOBS=" - with open(path, encoding="utf-8") as handle: - for line in handle: - if not line.startswith(prefix): - continue - if line != (prefix + args.jobs + "\n"): - if verify: - raise RuntimeError(f"Failed to configure abuild: {path}" - "\nTry to delete the file" - "(or zap the chroot).") - pmb.chroot.root(args, ["sed", "-i", "-e", - f"s/^{prefix}.*/{prefix}{args.jobs}/", - "/etc/abuild.conf"], - suffix) - configure_abuild(args, suffix, True) - return - pmb.chroot.root(args, ["sed", "-i", f"$ a\\{prefix}{args.jobs}", "/etc/abuild.conf"], suffix) - - -def configure_ccache(args, suffix="native", verify=False): - """ - Set the maximum ccache size - - :param verify: internally used to test if changing the config has worked. - """ - # Check if the settings have been set already - arch = pmb.parse.arch.from_chroot_suffix(args, suffix) - path = args.work + "/cache_ccache_" + arch + "/ccache.conf" - if os.path.exists(path): - with open(path, encoding="utf-8") as handle: - for line in handle: - if line == ("max_size = " + args.ccache_size + "\n"): - return - if verify: - raise RuntimeError("Failed to configure ccache: " + path + "\nTry to" - " delete the file (or zap the chroot).") - - # Set the size and verify - pmb.chroot.user(args, ["ccache", "--max-size", args.ccache_size], - suffix) - configure_ccache(args, suffix, True) diff --git a/pmb/chroot/__init__.py b/pmb/chroot/__init__.py deleted file mode 100644 index 01b50a01..00000000 --- a/pmb/chroot/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -from pmb.chroot.init import init, init_keys -from pmb.chroot.mount import mount, mount_native_into_foreign, remove_mnt_pmbootstrap -from pmb.chroot.root import root -from pmb.chroot.user import user -from pmb.chroot.user import exists as user_exists -from pmb.chroot.shutdown import shutdown -from pmb.chroot.zap import zap diff --git a/pmb/chroot/apk.py b/pmb/chroot/apk.py deleted file mode 100644 index 2603c505..00000000 --- a/pmb/chroot/apk.py +++ /dev/null @@ -1,266 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging -import shlex - -import pmb.chroot -import pmb.config -import pmb.helpers.apk -import pmb.helpers.pmaports -import pmb.parse.apkindex -import pmb.parse.arch -import pmb.parse.depends -import pmb.parse.version - - -def update_repository_list(args, suffix="native", check=False): - """ - Update /etc/apk/repositories, if it is outdated (when the user changed the - --mirror-alpine or --mirror-pmOS parameters). - - :param check: This function calls it self after updating the - /etc/apk/repositories file, to check if it was successful. - Only for this purpose, the "check" parameter should be set to - True. 
- """ - # Skip if we already did this - if suffix in pmb.helpers.other.cache["apk_repository_list_updated"]: - return - - # Read old entries or create folder structure - path = f"{args.work}/chroot_{suffix}/etc/apk/repositories" - lines_old = [] - if os.path.exists(path): - # Read all old lines - lines_old = [] - with open(path) as handle: - for line in handle: - lines_old.append(line[:-1]) - else: - pmb.helpers.run.root(args, ["mkdir", "-p", os.path.dirname(path)]) - - # Up to date: Save cache, return - lines_new = pmb.helpers.repo.urls(args) - if lines_old == lines_new: - pmb.helpers.other.cache["apk_repository_list_updated"].append(suffix) - return - - # Check phase: raise error when still outdated - if check: - raise RuntimeError(f"Failed to update: {path}") - - # Update the file - logging.debug(f"({suffix}) update /etc/apk/repositories") - if os.path.exists(path): - pmb.helpers.run.root(args, ["rm", path]) - for line in lines_new: - pmb.helpers.run.root(args, ["sh", "-c", "echo " - f"{shlex.quote(line)} >> {path}"]) - update_repository_list(args, suffix, True) - - -def check_min_version(args, suffix="native"): - """ - Check the minimum apk version, before running it the first time in the - current session (lifetime of one pmbootstrap call). - """ - - # Skip if we already did this - if suffix in pmb.helpers.other.cache["apk_min_version_checked"]: - return - - # Skip if apk is not installed yet - if not os.path.exists(f"{args.work}/chroot_{suffix}/sbin/apk"): - logging.debug(f"NOTE: Skipped apk version check for chroot '{suffix}'" - ", because it is not installed yet!") - return - - # Compare - version_installed = installed(args, suffix)["apk-tools"]["version"] - pmb.helpers.apk.check_outdated( - args, version_installed, - "Delete your http cache and zap all chroots, then try again:" - " 'pmbootstrap zap -hc'") - - # Mark this suffix as checked - pmb.helpers.other.cache["apk_min_version_checked"].append(suffix) - - -def install_build(args, package, arch): - """ - Build an outdated package unless pmbootstrap was invoked with - "pmbootstrap install" and the option to build packages during pmb install - is disabled. - - :param package: name of the package to build - :param arch: architecture of the package to build - """ - # User may have disabled building packages during "pmbootstrap install" - if args.action == "install" and not args.build_pkgs_on_install: - if not pmb.parse.apkindex.package(args, package, arch, False): - raise RuntimeError(f"{package}: no binary package found for" - f" {arch}, and compiling packages during" - " 'pmbootstrap install' has been disabled." - " Consider changing this option in" - " 'pmbootstrap init'.") - # Use the existing binary package - return - - # Build the package if it's in pmaports and there is no binary package - # with the same pkgver and pkgrel. This check is done in - # pmb.build.is_necessary, which gets called in pmb.build.package. - return pmb.build.package(args, package, arch) - - -def packages_split_to_add_del(packages): - """ - Sort packages into "to_add" and "to_del" lists depending on their pkgname - starting with an exclamation mark. - - :param packages: list of pkgnames - :returns: (to_add, to_del) - tuple of lists of pkgnames, e.g. 
- (["hello-world", ...], ["some-conflict-pkg", ...]) - """ - to_add = [] - to_del = [] - - for package in packages: - if package.startswith("!"): - to_del.append(package.lstrip("!")) - else: - to_add.append(package) - - return (to_add, to_del) - - -def packages_get_locally_built_apks(args, packages, arch): - """ - Iterate over packages and if existing, get paths to locally built packages. - This is used to force apk to upgrade packages to newer local versions, even - if the pkgver and pkgrel did not change. - - :param packages: list of pkgnames - :param arch: architecture that the locally built packages should have - :returns: list of apk file paths that are valid inside the chroots, e.g. - ["/mnt/pmbootstrap/packages/x86_64/hello-world-1-r6.apk", ...] - """ - channel = pmb.config.pmaports.read_config(args)["channel"] - ret = [] - - for package in packages: - data_repo = pmb.parse.apkindex.package(args, package, arch, False) - if not data_repo: - continue - - apk_file = f"{package}-{data_repo['version']}.apk" - if not os.path.exists(f"{args.work}/packages/{channel}/{arch}/{apk_file}"): - continue - - ret.append(f"/mnt/pmbootstrap/packages/{arch}/{apk_file}") - - return ret - - -def install_run_apk(args, to_add, to_add_local, to_del, suffix): - """ - Run apk to add packages, and ensure only the desired packages get - explicitly marked as installed. - - :param to_add: list of pkgnames to install, without their dependencies - :param to_add_local: return of packages_get_locally_built_apks() - :param to_del: list of pkgnames to be deleted, this should be set to - conflicting dependencies in any of the packages to be - installed or their dependencies (e.g. ["osk-sdl"]) - :param suffix: the chroot suffix, e.g. "native" or "rootfs_qemu-amd64" - """ - # Sanitize packages: don't allow '--allow-untrusted' and other options - # to be passed to apk! - for package in to_add + to_add_local + to_del: - if package.startswith("-"): - raise ValueError(f"Invalid package name: {package}") - - commands = [["add"] + to_add] - - # Use a virtual package to mark only the explicitly requested packages as - # explicitly installed, not the ones in to_add_local - if to_add_local: - commands += [["add", "-u", "--virtual", ".pmbootstrap"] + to_add_local, - ["del", ".pmbootstrap"]] - - if to_del: - commands += [["del"] + to_del] - - for (i, command) in enumerate(commands): - # --no-interactive is a parameter to `add`, so it must be appended or apk - # gets confused - command += ["--no-interactive"] - - if args.offline: - command = ["--no-network"] + command - if i == 0: - pmb.helpers.apk.apk_with_progress(args, ["apk"] + command, - chroot=True, suffix=suffix) - else: - # Virtual package related commands don't actually install or remove - # packages, but only mark the right ones as explicitly installed. - # They finish up almost instantly, so don't display a progress bar. - pmb.chroot.root(args, ["apk", "--no-progress"] + command, - suffix=suffix) - - -def install(args, packages, suffix="native", build=True): - """ - Install packages from pmbootstrap's local package index or the pmOS/Alpine - binary package mirrors. Iterate over all dependencies recursively, and - build missing packages as necessary. - - :param packages: list of pkgnames to be installed - :param suffix: the chroot suffix, e.g. "native" or "rootfs_qemu-amd64" - :param build: automatically build the package, when it does not exist yet - or needs to be updated, and it is inside pmaports. 
For the - special case that all packages are expected to be in Alpine's - repositories, set this to False for performance optimization. - """ - arch = pmb.parse.arch.from_chroot_suffix(args, suffix) - - if not packages: - logging.verbose("pmb.chroot.apk.install called with empty packages list," - " ignoring") - return - - # Initialize chroot - check_min_version(args, suffix) - pmb.chroot.init(args, suffix) - - packages_with_depends = pmb.parse.depends.recurse(args, packages, suffix) - to_add, to_del = packages_split_to_add_del(packages_with_depends) - - if build: - for package in to_add: - install_build(args, package, arch) - - to_add_local = packages_get_locally_built_apks(args, to_add, arch) - to_add_no_deps, _ = packages_split_to_add_del(packages) - - logging.info(f"({suffix}) install {' '.join(to_add_no_deps)}") - install_run_apk(args, to_add_no_deps, to_add_local, to_del, suffix) - - -def installed(args, suffix="native"): - """ - Read the list of installed packages (which has almost the same format, as - an APKINDEX, but with more keys). - - :returns: a dictionary with the following structure: - { "postmarketos-mkinitfs": - { - "pkgname": "postmarketos-mkinitfs" - "version": "0.0.4-r10", - "depends": ["busybox-extras", "lddtree", ...], - "provides": ["mkinitfs=0.0.1"] - }, ... - } - """ - path = f"{args.work}/chroot_{suffix}/lib/apk/db/installed" - return pmb.parse.apkindex.parse(path, False) diff --git a/pmb/chroot/apk_static.py b/pmb/chroot/apk_static.py deleted file mode 100644 index 00f9cc37..00000000 --- a/pmb/chroot/apk_static.py +++ /dev/null @@ -1,176 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging -import shutil -import tarfile -import tempfile -import stat - -import pmb.helpers.apk -import pmb.helpers.run -import pmb.config -import pmb.config.load -import pmb.parse.apkindex -import pmb.helpers.http -import pmb.parse.version - - -def read_signature_info(tar): - """ - Find various information about the signature that was used to sign - /sbin/apk.static inside the archive (not to be confused with the normal apk - archive signature!) - - :returns: (sigfilename, sigkey_path) - """ - # Get signature filename and key - prefix = "sbin/apk.static.SIGN.RSA." - sigfilename = None - for filename in tar.getnames(): - if filename.startswith(prefix): - sigfilename = filename - break - if not sigfilename: - raise RuntimeError("Could not find signature filename in apk." - " This means that your apk file is damaged." - " Delete it and try again." - " If the problem persists, fill out a bug report.") - sigkey = sigfilename[len(prefix):] - logging.debug(f"sigfilename: {sigfilename}") - logging.debug(f"sigkey: {sigkey}") - - # Get path to keyfile on disk - sigkey_path = f"{pmb.config.apk_keys_path}/{sigkey}" - if "/" in sigkey or not os.path.exists(sigkey_path): - logging.debug(f"sigkey_path: {sigkey_path}") - raise RuntimeError(f"Invalid signature key: {sigkey}") - - return (sigfilename, sigkey_path) - - -def extract_temp(tar, sigfilename): - """ - Extract apk.static and signature as temporary files. 
- """ - ret = { - "apk": { - "filename": "sbin/apk.static", - "temp_path": None - }, - "sig": { - "filename": sigfilename, - "temp_path": None - } - } - for ftype in ret.keys(): - member = tar.getmember(ret[ftype]["filename"]) - - handle, path = tempfile.mkstemp(ftype, "pmbootstrap") - handle = open(handle, "wb") - ret[ftype]["temp_path"] = path - shutil.copyfileobj(tar.extractfile(member), handle) - - logging.debug(f"extracted: {path}") - handle.close() - return ret - - -def verify_signature(args, files, sigkey_path): - """ - Verify the signature with openssl. - - :param files: return value from extract_temp() - :raises RuntimeError: when verification failed and removes temp files - """ - logging.debug(f"Verify apk.static signature with {sigkey_path}") - try: - pmb.helpers.run.user(args, ["openssl", "dgst", "-sha1", "-verify", - sigkey_path, "-signature", files[ - "sig"]["temp_path"], - files["apk"]["temp_path"]]) - except BaseException: - os.unlink(files["sig"]["temp_path"]) - os.unlink(files["apk"]["temp_path"]) - raise RuntimeError("Failed to validate signature of apk.static." - " Either openssl is not installed, or the" - " download failed. Run 'pmbootstrap zap -hc' to" - " delete the download and try again.") - - -def extract(args, version, apk_path): - """ - Extract everything to temporary locations, verify signatures and reported - versions. When everything is right, move the extracted apk.static to the - final location. - """ - # Extract to a temporary path - with tarfile.open(apk_path, "r:gz") as tar: - sigfilename, sigkey_path = read_signature_info(tar) - files = extract_temp(tar, sigfilename) - - # Verify signature - verify_signature(args, files, sigkey_path) - os.unlink(files["sig"]["temp_path"]) - temp_path = files["apk"]["temp_path"] - - # Verify the version that the extracted binary reports - logging.debug("Verify the version reported by the apk.static binary" - f" (must match the package version {version})") - os.chmod(temp_path, os.stat(temp_path).st_mode | stat.S_IEXEC) - version_bin = pmb.helpers.run.user(args, [temp_path, "--version"], - output_return=True) - version_bin = version_bin.split(" ")[1].split(",")[0] - if not version.startswith(f"{version_bin}-r"): - os.unlink(temp_path) - raise RuntimeError(f"Downloaded apk-tools-static-{version}.apk," - " but the apk binary inside that package reports" - f" to be version: {version_bin}!" - " Looks like a downgrade attack" - " from a malicious server! Switch the server (-m)" - " and try again.") - - # Move it to the right path - target_path = f"{args.work}/apk.static" - shutil.move(temp_path, target_path) - - -def download(args, file): - """ - Download a single file from an Alpine mirror. - """ - channel_cfg = pmb.config.pmaports.read_config_channel(args) - mirrordir = channel_cfg["mirrordir_alpine"] - base_url = f"{args.mirror_alpine}{mirrordir}/main/{pmb.config.arch_native}" - return pmb.helpers.http.download(args, f"{base_url}/{file}", file) - - -def init(args): - """ - Download, verify, extract $WORK/apk.static. 
- """ - # Get and parse the APKINDEX - apkindex = pmb.helpers.repo.alpine_apkindex_path(args, "main") - index_data = pmb.parse.apkindex.package(args, "apk-tools-static", - indexes=[apkindex]) - version = index_data["version"] - - # Verify the apk-tools-static version - pmb.helpers.apk.check_outdated( - args, version, "Run 'pmbootstrap update', then try again.") - - # Download, extract, verify apk-tools-static - apk_name = f"apk-tools-static-{version}.apk" - apk_static = download(args, apk_name) - extract(args, version, apk_static) - - -def run(args, parameters): - # --no-interactive is a parameter to `add`, so it must be appended or apk - # gets confused - parameters += ["--no-interactive"] - - if args.offline: - parameters = ["--no-network"] + parameters - pmb.helpers.apk.apk_with_progress( - args, [f"{args.work}/apk.static"] + parameters, chroot=False) diff --git a/pmb/chroot/binfmt.py b/pmb/chroot/binfmt.py deleted file mode 100644 index 61b0b026..00000000 --- a/pmb/chroot/binfmt.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging - -import pmb.helpers.run -import pmb.helpers.other -import pmb.parse -import pmb.parse.arch - - -def is_registered(arch_qemu): - return os.path.exists("/proc/sys/fs/binfmt_misc/qemu-" + arch_qemu) - - -def register(args, arch): - """ - Get arch, magic, mask. - """ - arch_qemu = pmb.parse.arch.alpine_to_qemu(arch) - - # always make sure the qemu- binary is installed, since registering - # may happen outside of this method (e.g. by OS) - if f"qemu-{arch_qemu}" not in pmb.chroot.apk.installed(args): - pmb.chroot.apk.install(args, ["qemu-" + arch_qemu]) - - if is_registered(arch_qemu): - return - pmb.helpers.other.check_binfmt_misc(args) - - # Don't continue if the actions from check_binfmt_misc caused the OS to - # automatically register the target arch - if is_registered(arch_qemu): - return - - info = pmb.parse.binfmt_info(arch_qemu) - - # Build registration string - # https://en.wikipedia.org/wiki/Binfmt_misc - # :name:type:offset:magic:mask:interpreter:flags - name = "qemu-" + arch_qemu - type = "M" - offset = "" - magic = info["magic"] - mask = info["mask"] - interpreter = "/usr/bin/qemu-" + arch_qemu + "-static" - flags = "C" - code = ":".join(["", name, type, offset, magic, mask, interpreter, - flags]) - - # Register in binfmt_misc - logging.info("Register qemu binfmt (" + arch_qemu + ")") - register = "/proc/sys/fs/binfmt_misc/register" - pmb.helpers.run.root( - args, ["sh", "-c", 'echo "' + code + '" > ' + register]) - - -def unregister(args, arch): - arch_qemu = pmb.parse.arch.alpine_to_qemu(arch) - binfmt_file = "/proc/sys/fs/binfmt_misc/qemu-" + arch_qemu - if not os.path.exists(binfmt_file): - return - logging.info("Unregister qemu binfmt (" + arch_qemu + ")") - pmb.helpers.run.root(args, ["sh", "-c", "echo -1 > " + binfmt_file]) diff --git a/pmb/chroot/init.py b/pmb/chroot/init.py deleted file mode 100644 index e2622ffc..00000000 --- a/pmb/chroot/init.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import glob -import filecmp - -import pmb.chroot -import pmb.chroot.apk_static -import pmb.config -import pmb.config.workdir -import pmb.helpers.repo -import pmb.helpers.run -import pmb.parse.arch - - -def copy_resolv_conf(args, suffix="native"): - """ - Use pythons super fast file compare function (due to caching) - and copy the /etc/resolv.conf to the chroot, in case it is - 
different from the host. - If the file doesn't exist, create an empty file with 'touch'. - """ - host = "/etc/resolv.conf" - chroot = f"{args.work}/chroot_{suffix}{host}" - if os.path.exists(host): - if not os.path.exists(chroot) or not filecmp.cmp(host, chroot): - pmb.helpers.run.root(args, ["cp", host, chroot]) - else: - pmb.helpers.run.root(args, ["touch", chroot]) - - -def mark_in_chroot(args, suffix="native"): - """ - Touch a flag so we can know when we're running in chroot (and - don't accidentally flash partitions on our host). This marker - gets removed in pmb.chroot.shutdown (pmbootstrap shutdown). - """ - in_chroot_file = f"{args.work}/chroot_{suffix}/in-pmbootstrap" - if not os.path.exists(in_chroot_file): - pmb.helpers.run.root(args, ["touch", in_chroot_file]) - - -def setup_qemu_emulation(args, suffix): - arch = pmb.parse.arch.from_chroot_suffix(args, suffix) - if not pmb.parse.arch.cpu_emulation_required(arch): - return - - chroot = f"{args.work}/chroot_{suffix}" - arch_qemu = pmb.parse.arch.alpine_to_qemu(arch) - - # mount --bind the qemu-user binary - pmb.chroot.binfmt.register(args, arch) - pmb.helpers.mount.bind_file(args, f"{args.work}/chroot_native" - f"/usr/bin/qemu-{arch_qemu}", - f"{chroot}/usr/bin/qemu-{arch_qemu}-static", - create_folders=True) - - -def init_keys(args): - """ - All Alpine and postmarketOS repository keys are shipped with pmbootstrap. - Copy them into $WORK/config_apk_keys, which gets mounted inside the various - chroots as /etc/apk/keys. - - This is done before installing any package, so apk can verify APKINDEX - files of binary repositories even though alpine-keys/postmarketos-keys are - not installed yet. - """ - for key in glob.glob(f"{pmb.config.apk_keys_path}/*.pub"): - target = f"{args.work}/config_apk_keys/{os.path.basename(key)}" - if not os.path.exists(target): - # Copy as root, so the resulting files in chroots are owned by root - pmb.helpers.run.root(args, ["cp", key, target]) - - -def init(args, suffix="native"): - # When already initialized: just prepare the chroot - chroot = f"{args.work}/chroot_{suffix}" - arch = pmb.parse.arch.from_chroot_suffix(args, suffix) - - pmb.chroot.mount(args, suffix) - setup_qemu_emulation(args, suffix) - mark_in_chroot(args, suffix) - if os.path.islink(f"{chroot}/bin/sh"): - pmb.config.workdir.chroot_check_channel(args, suffix) - copy_resolv_conf(args, suffix) - pmb.chroot.apk.update_repository_list(args, suffix) - return - - # Require apk-tools-static - pmb.chroot.apk_static.init(args) - - logging.info(f"({suffix}) install alpine-base") - - # Initialize cache - apk_cache = f"{args.work}/cache_apk_{arch}" - pmb.helpers.run.root(args, ["ln", "-s", "-f", "/var/cache/apk", - f"{chroot}/etc/apk/cache"]) - - # Initialize /etc/apk/keys/, resolv.conf, repositories - init_keys(args) - copy_resolv_conf(args, suffix) - pmb.chroot.apk.update_repository_list(args, suffix) - - pmb.config.workdir.chroot_save_init(args, suffix) - - # Install alpine-base - pmb.helpers.repo.update(args, arch) - pmb.chroot.apk_static.run(args, ["--root", chroot, - "--cache-dir", apk_cache, - "--initdb", "--arch", arch, - "add", "alpine-base"]) - - # Building chroots: create "pmos" user, add symlinks to /home/pmos - if not suffix.startswith("rootfs_"): - pmb.chroot.root(args, ["adduser", "-D", "pmos", "-u", - pmb.config.chroot_uid_user], - suffix, auto_init=False) - - # Create the links (with subfolders if necessary) - for target, link_name in pmb.config.chroot_home_symlinks.items(): - link_dir = os.path.dirname(link_name) - if not 
os.path.exists(f"{chroot}{link_dir}"): - pmb.chroot.user(args, ["mkdir", "-p", link_dir], suffix) - if not os.path.exists(f"{chroot}{target}"): - pmb.chroot.root(args, ["mkdir", "-p", target], suffix) - pmb.chroot.user(args, ["ln", "-s", target, link_name], suffix) - pmb.chroot.root(args, ["chown", "pmos:pmos", target], suffix) diff --git a/pmb/chroot/initfs.py b/pmb/chroot/initfs.py deleted file mode 100644 index 257534db..00000000 --- a/pmb/chroot/initfs.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging -import pmb.chroot.initfs_hooks -import pmb.chroot.other -import pmb.chroot.apk -import pmb.config.pmaports -import pmb.helpers.cli - - -def build(args, flavor, suffix): - # Update mkinitfs and hooks - pmb.chroot.apk.install(args, ["postmarketos-mkinitfs"], suffix) - pmb.chroot.initfs_hooks.update(args, suffix) - pmaports_cfg = pmb.config.pmaports.read_config(args) - - # Call mkinitfs - logging.info(f"({suffix}) mkinitfs {flavor}") - if pmaports_cfg.get("supported_mkinitfs_without_flavors", False): - pmb.chroot.root(args, ["mkinitfs"], suffix) - else: - release_file = (f"{args.work}/chroot_{suffix}/usr/share/kernel/" - f"{flavor}/kernel.release") - with open(release_file, "r") as handle: - release = handle.read().rstrip() - pmb.chroot.root(args, ["mkinitfs", "-o", - f"/boot/initramfs-{flavor}", release], - suffix) - - -def extract(args, flavor, suffix, extra=False): - """ - Extract the initramfs to /tmp/initfs-extracted or the initramfs-extra to - /tmp/initfs-extra-extracted and return the outside extraction path. - """ - # Extraction folder - inside = "/tmp/initfs-extracted" - - pmaports_cfg = pmb.config.pmaports.read_config(args) - if pmaports_cfg.get("supported_mkinitfs_without_flavors", False): - initfs_file = "/boot/initramfs" - else: - initfs_file = f"/boot/initramfs-${flavor}" - if extra: - inside = "/tmp/initfs-extra-extracted" - initfs_file += "-extra" - - outside = f"{args.work}/chroot_{suffix}{inside}" - if os.path.exists(outside): - if not pmb.helpers.cli.confirm(args, f"Extraction folder {outside}" - " already exists." 
- " Do you want to overwrite it?"): - raise RuntimeError("Aborted!") - pmb.chroot.root(args, ["rm", "-r", inside], suffix) - - # Extraction script (because passing a file to stdin is not allowed - # in pmbootstrap's chroot/shell functions for security reasons) - with open(f"{args.work}/chroot_{suffix}/tmp/_extract.sh", "w") as handle: - handle.write( - "#!/bin/sh\n" - f"cd {inside} && cpio -i < _initfs\n") - - # Extract - commands = [["mkdir", "-p", inside], - ["cp", initfs_file, f"{inside}/_initfs.gz"], - ["gzip", "-d", f"{inside}/_initfs.gz"], - ["cat", "/tmp/_extract.sh"], # for the log - ["sh", "/tmp/_extract.sh"], - ["rm", "/tmp/_extract.sh", f"{inside}/_initfs"] - ] - for command in commands: - pmb.chroot.root(args, command, suffix) - - # Return outside path for logging - return outside - - -def ls(args, flavor, suffix, extra=False): - tmp = "/tmp/initfs-extracted" - if extra: - tmp = "/tmp/initfs-extra-extracted" - extract(args, flavor, suffix, extra) - pmb.chroot.root(args, ["ls", "-lahR", "."], suffix, tmp, "stdout") - pmb.chroot.root(args, ["rm", "-r", tmp], suffix) - - -def frontend(args): - # Find the appropriate kernel flavor - suffix = f"rootfs_{args.device}" - flavor = pmb.chroot.other.kernel_flavor_installed(args, suffix) - - # Handle initfs actions - action = args.action_initfs - if action == "build": - build(args, flavor, suffix) - elif action == "extract": - dir = extract(args, flavor, suffix) - logging.info(f"Successfully extracted initramfs to: {dir}") - dir_extra = extract(args, flavor, suffix, True) - logging.info(f"Successfully extracted initramfs-extra to: {dir_extra}") - elif action == "ls": - logging.info("*** initramfs ***") - ls(args, flavor, suffix) - logging.info("*** initramfs-extra ***") - ls(args, flavor, suffix, True) - - # Handle hook actions - elif action == "hook_ls": - pmb.chroot.initfs_hooks.ls(args, suffix) - else: - if action == "hook_add": - pmb.chroot.initfs_hooks.add(args, args.hook, suffix) - elif action == "hook_del": - pmb.chroot.initfs_hooks.delete(args, args.hook, suffix) - - # Rebuild the initfs after adding/removing a hook - build(args, flavor, suffix) - - if action in ["ls", "extract"]: - link = "https://wiki.postmarketos.org/wiki/Initramfs_development" - logging.info(f"See also: <{link}>") diff --git a/pmb/chroot/initfs_hooks.py b/pmb/chroot/initfs_hooks.py deleted file mode 100644 index 75a85a0f..00000000 --- a/pmb/chroot/initfs_hooks.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import glob -import logging - -import pmb.config -import pmb.chroot.apk - - -def list_chroot(args, suffix, remove_prefix=True): - ret = [] - prefix = pmb.config.initfs_hook_prefix - for pkgname in pmb.chroot.apk.installed(args, suffix).keys(): - if pkgname.startswith(prefix): - if remove_prefix: - ret.append(pkgname[len(prefix):]) - else: - ret.append(pkgname) - return ret - - -def list_aports(args): - ret = [] - prefix = pmb.config.initfs_hook_prefix - for path in glob.glob(f"{args.aports}/*/{prefix}*"): - ret.append(os.path.basename(path)[len(prefix):]) - return ret - - -def ls(args, suffix): - hooks_chroot = list_chroot(args, suffix) - hooks_aports = list_aports(args) - - for hook in hooks_aports: - line = f"* {hook} ({'' if hook in hooks_chroot else 'not '}installed)" - logging.info(line) - - -def add(args, hook, suffix): - if hook not in list_aports(args): - raise RuntimeError("Invalid hook name!" 
- " Run 'pmbootstrap initfs hook_ls'" - " to get a list of all hooks.") - prefix = pmb.config.initfs_hook_prefix - pmb.chroot.apk.install(args, [f"{prefix}{hook}"], suffix) - - -def delete(args, hook, suffix): - if hook not in list_chroot(args, suffix): - raise RuntimeError("There is no such hook installed!") - prefix = pmb.config.initfs_hook_prefix - pmb.chroot.root(args, ["apk", "del", f"{prefix}{hook}"], suffix) - - -def update(args, suffix): - """ - Rebuild and update all hooks that are out of date - """ - pmb.chroot.apk.install(args, list_chroot(args, suffix, False), suffix) diff --git a/pmb/chroot/mount.py b/pmb/chroot/mount.py deleted file mode 100644 index 45053c0d..00000000 --- a/pmb/chroot/mount.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import logging -import os -import pmb.config -import pmb.parse -import pmb.helpers.mount - - -def create_device_nodes(args, suffix): - """ - Create device nodes for null, zero, full, random, urandom in the chroot. - """ - try: - chroot = args.work + "/chroot_" + suffix - - # Create all device nodes as specified in the config - for dev in pmb.config.chroot_device_nodes: - path = chroot + "/dev/" + str(dev[4]) - if not os.path.exists(path): - pmb.helpers.run.root(args, ["mknod", - "-m", str(dev[0]), # permissions - path, # name - str(dev[1]), # type - str(dev[2]), # major - str(dev[3]), # minor - ]) - - # Verify major and minor numbers of created nodes - for dev in pmb.config.chroot_device_nodes: - path = chroot + "/dev/" + str(dev[4]) - stat_result = os.stat(path) - rdev = stat_result.st_rdev - assert os.major(rdev) == dev[2], "Wrong major in " + path - assert os.minor(rdev) == dev[3], "Wrong minor in " + path - - # Verify /dev/zero reading and writing - path = chroot + "/dev/zero" - with open(path, "r+b", 0) as handle: - assert handle.write(bytes([0xff])), "Write failed for " + path - assert handle.read(1) == bytes([0x00]), "Read failed for " + path - - # On failure: Show filesystem-related error - except Exception as e: - logging.info(str(e) + "!") - raise RuntimeError("Failed to create device nodes in the '" + - suffix + "' chroot.") - - -def mount_dev_tmpfs(args, suffix="native"): - """ - Mount tmpfs inside the chroot's dev folder to make sure we can create - device nodes, even if the filesystem of the work folder does not support - it. 
- """ - # Do nothing when it is already mounted - dev = args.work + "/chroot_" + suffix + "/dev" - if pmb.helpers.mount.ismount(dev): - return - - # Create the $chroot/dev folder and mount tmpfs there - pmb.helpers.run.root(args, ["mkdir", "-p", dev]) - pmb.helpers.run.root(args, ["mount", "-t", "tmpfs", - "-o", "size=1M,noexec,dev", - "tmpfs", dev]) - - # Create pts, shm folders and device nodes - pmb.helpers.run.root(args, ["mkdir", "-p", dev + "/pts", dev + "/shm"]) - pmb.helpers.run.root(args, ["mount", "-t", "tmpfs", - "-o", "nodev,nosuid,noexec", - "tmpfs", dev + "/shm"]) - create_device_nodes(args, suffix) - - # Setup /dev/fd as a symlink - pmb.helpers.run.root(args, ["ln", "-sf", "/proc/self/fd", f"{dev}/"]) - - -def mount(args, suffix="native"): - # Mount tmpfs as the chroot's /dev - mount_dev_tmpfs(args, suffix) - - # Get all mountpoints - arch = pmb.parse.arch.from_chroot_suffix(args, suffix) - channel = pmb.config.pmaports.read_config(args)["channel"] - mountpoints = {} - for source, target in pmb.config.chroot_mount_bind.items(): - source = source.replace("$WORK", args.work) - source = source.replace("$ARCH", arch) - source = source.replace("$CHANNEL", channel) - mountpoints[source] = target - - # Mount if necessary - for source, target in mountpoints.items(): - target_full = args.work + "/chroot_" + suffix + target - pmb.helpers.mount.bind(args, source, target_full) - - -def mount_native_into_foreign(args, suffix): - source = args.work + "/chroot_native" - target = args.work + "/chroot_" + suffix + "/native" - pmb.helpers.mount.bind(args, source, target) - - musl = os.path.basename(glob.glob(source + "/lib/ld-musl-*.so.1")[0]) - musl_link = args.work + "/chroot_" + suffix + "/lib/" + musl - if not os.path.lexists(musl_link): - pmb.helpers.run.root(args, ["ln", "-s", "/native/lib/" + musl, - musl_link]) - -def remove_mnt_pmbootstrap(args, suffix): - """ Safely remove /mnt/pmbootstrap directories from the chroot, without - running rm -r as root and potentially removing data inside the - mountpoint in case it was still mounted (bug in pmbootstrap, or user - ran pmbootstrap 2x in parallel). This is similar to running 'rm -r -d', - but we don't assume that the host's rm has the -d flag (busybox does - not). """ - mnt_dir = f"{args.work}/chroot_{suffix}/mnt/pmbootstrap" - - if not os.path.exists(mnt_dir): - return - - for path in glob.glob(f"{mnt_dir}/*") + [mnt_dir]: - pmb.helpers.run.root(args, ["rmdir", path]) diff --git a/pmb/chroot/other.py b/pmb/chroot/other.py deleted file mode 100644 index 4af5029c..00000000 --- a/pmb/chroot/other.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import glob -import logging -import pmb.chroot.apk -import pmb.install - - -def kernel_flavor_installed(args, suffix, autoinstall=True): - """ - Get installed kernel flavor. Optionally install the device's kernel - beforehand. - - :param suffix: the chroot suffix, e.g. "native" or "rootfs_qemu-amd64" - :param autoinstall: install the device's kernel if it is not installed - :returns: * string with the installed kernel flavor, - e.g. 
["postmarketos-qcom-sdm845"] - * None if no kernel is installed - """ - # Automatically install the selected kernel - if autoinstall: - packages = ([f"device-{args.device}"] + - pmb.install.get_kernel_package(args, args.device)) - pmb.chroot.apk.install(args, packages, suffix) - - pattern = f"{args.work}/chroot_{suffix}/usr/share/kernel/*" - glob_result = glob.glob(pattern) - - # There should be only one directory here - return os.path.basename(glob_result[0]) if glob_result else None - - -def tempfolder(args, path, suffix="native"): - """ - Create a temporary folder inside the chroot that belongs to "user". - The folder gets deleted, if it already exists. - - :param path: of the temporary folder inside the chroot - :returns: the path - """ - if os.path.exists(args.work + "/chroot_" + suffix + path): - pmb.chroot.root(args, ["rm", "-r", path]) - pmb.chroot.user(args, ["mkdir", "-p", path]) - return path - - -def copy_xauthority(args): - """ - Copy the host system's Xauthority file to the pmos user inside the chroot, - so we can start X11 applications from there. - """ - # Check $DISPLAY - logging.info("(native) copy host Xauthority") - if not os.environ.get("DISPLAY"): - raise RuntimeError("Your $DISPLAY variable is not set. If you have an" - " X11 server running as your current user, try" - " 'export DISPLAY=:0' and run your last" - " pmbootstrap command again.") - - # Check $XAUTHORITY - original = os.environ.get("XAUTHORITY") - if not original: - original = os.path.join(os.environ['HOME'], '.Xauthority') - if not os.path.exists(original): - raise RuntimeError("Could not find your Xauthority file, try to export" - " your $XAUTHORITY correctly. Looked here: " + - original) - - # Copy to chroot and chown - copy = args.work + "/chroot_native/home/pmos/.Xauthority" - if os.path.exists(copy): - pmb.helpers.run.root(args, ["rm", copy]) - pmb.helpers.run.root(args, ["cp", original, copy]) - pmb.chroot.root(args, ["chown", "pmos:pmos", "/home/pmos/.Xauthority"]) diff --git a/pmb/chroot/root.py b/pmb/chroot/root.py deleted file mode 100644 index 4555638d..00000000 --- a/pmb/chroot/root.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import shutil - -import pmb.config -import pmb.chroot -import pmb.chroot.binfmt -import pmb.helpers.run -import pmb.helpers.run_core - - -def executables_absolute_path(): - """ - Get the absolute paths to the sh and chroot executables. - """ - ret = {} - for binary in ["sh", "chroot"]: - path = shutil.which(binary, path=pmb.config.chroot_host_path) - if not path: - raise RuntimeError(f"Could not find the '{binary}'" - " executable. Make sure that it is in" - " your current user's PATH.") - ret[binary] = path - return ret - - -def root(args, cmd, suffix="native", working_dir="/", output="log", - output_return=False, check=None, env={}, auto_init=True, - disable_timeout=False): - """ - Run a command inside a chroot as root. - - :param env: dict of environment variables to be passed to the command, e.g. - {"JOBS": "5"} - :param auto_init: automatically initialize the chroot - - See pmb.helpers.run_core.core() for a detailed description of all other - arguments and the return value. 
- """ - # Initialize chroot - chroot = f"{args.work}/chroot_{suffix}" - if not auto_init and not os.path.islink(f"{chroot}/bin/sh"): - raise RuntimeError(f"Chroot does not exist: {chroot}") - if auto_init: - pmb.chroot.init(args, suffix) - - # Readable log message (without all the escaping) - msg = f"({suffix}) % " - for key, value in env.items(): - msg += f"{key}={value} " - if working_dir != "/": - msg += f"cd {working_dir}; " - msg += " ".join(cmd) - - # Merge env with defaults into env_all - env_all = {"CHARSET": "UTF-8", - "HISTFILE": "~/.ash_history", - "HOME": "/root", - "LANG": "UTF-8", - "PATH": pmb.config.chroot_path, - "PYTHONUNBUFFERED": "1", - "SHELL": "/bin/ash", - "TERM": "xterm"} - for key, value in env.items(): - env_all[key] = value - - # Build the command in steps and run it, e.g.: - # cmd: ["echo", "test"] - # cmd_chroot: ["/sbin/chroot", "/..._native", "/bin/sh", "-c", "echo test"] - # cmd_sudo: ["sudo", "env", "-i", "sh", "-c", "PATH=... /sbin/chroot ..."] - executables = executables_absolute_path() - cmd_chroot = [executables["chroot"], chroot, "/bin/sh", "-c", - pmb.helpers.run_core.flat_cmd(cmd, working_dir)] - cmd_sudo = pmb.config.sudo([ - "env", "-i", executables["sh"], "-c", - pmb.helpers.run_core.flat_cmd(cmd_chroot, env=env_all)] - ) - return pmb.helpers.run_core.core(args, msg, cmd_sudo, None, output, - output_return, check, True, - disable_timeout) diff --git a/pmb/chroot/shutdown.py b/pmb/chroot/shutdown.py deleted file mode 100644 index 20f90604..00000000 --- a/pmb/chroot/shutdown.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import glob -import os -import socket -from contextlib import closing - -import pmb.chroot -import pmb.helpers.mount -import pmb.install.losetup -import pmb.parse.arch - - -def kill_adb(args): - """ - Kill adb daemon if it's running. - """ - port = 5038 - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: - if sock.connect_ex(("127.0.0.1", port)) == 0: - pmb.chroot.root(args, ["adb", "-P", str(port), "kill-server"]) - - -def kill_sccache(args): - """ - Kill sccache daemon if it's running. Unlike ccache it automatically spawns - a daemon when you call it and exits after some time of inactivity. 
- """ - port = 4226 - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: - if sock.connect_ex(("127.0.0.1", port)) == 0: - pmb.chroot.root(args, ["sccache", "--stop-server"]) - - -def shutdown_cryptsetup_device(args, name): - """ - :param name: cryptsetup device name, usually "pm_crypt" in pmbootstrap - """ - if not os.path.exists(args.work + "/chroot_native/dev/mapper/" + name): - return - pmb.chroot.apk.install(args, ["cryptsetup"]) - status = pmb.chroot.root(args, ["cryptsetup", "status", name], - output_return=True, check=False) - if not status: - logging.warning("WARNING: Failed to run cryptsetup to get the status" - " for " + name + ", assuming it is not mounted" - " (shutdown fails later if it is)!") - return - - if status.startswith("/dev/mapper/" + name + " is active."): - pmb.chroot.root(args, ["cryptsetup", "luksClose", name]) - elif status.startswith("/dev/mapper/" + name + " is inactive."): - # When "cryptsetup status" fails, the device is not mounted and we - # have a left over file (#83) - pmb.chroot.root(args, ["rm", "/dev/mapper/" + name]) - else: - raise RuntimeError("Failed to parse 'cryptsetup status' output!") - - -def shutdown(args, only_install_related=False): - # Stop daemons - kill_adb(args) - kill_sccache(args) - - # Umount installation-related paths (order is important!) - pmb.helpers.mount.umount_all(args, args.work + - "/chroot_native/mnt/install") - shutdown_cryptsetup_device(args, "pm_crypt") - - # Umount all losetup mounted images - chroot = args.work + "/chroot_native" - if pmb.helpers.mount.ismount(chroot + "/dev/loop-control"): - pattern = chroot + "/home/pmos/rootfs/*.img" - for path_outside in glob.glob(pattern): - path = path_outside[len(chroot):] - pmb.install.losetup.umount(args, path, auto_init=False) - - # Umount device rootfs and installer chroots - for prefix in ["rootfs", "installer"]: - path = f"{args.work}/chroot_{prefix}_{args.device}" - if os.path.exists(path): - pmb.helpers.mount.umount_all(args, path) - - # Remove "in-pmbootstrap" marker from all chroots. This marker indicates - # that pmbootstrap has set up all mount points etc. to run programs inside - # the chroots, but we want it gone afterwards (e.g. when the chroot - # contents get copied to a rootfs / installer image, or if creating an - # android recovery zip from its contents). - for marker in glob.glob(f"{args.work}/chroot_*/in-pmbootstrap"): - pmb.helpers.run.root(args, ["rm", marker]) - - if not only_install_related: - # Umount all folders inside args.work - # The folders are explicitly iterated over, so folders symlinked inside - # args.work get umounted as well (used in test_pkgrel_bump.py, #1595) - for path in glob.glob(args.work + "/*"): - pmb.helpers.mount.umount_all(args, path) - - # Clean up the rest - for arch in pmb.config.build_device_architectures: - if pmb.parse.arch.cpu_emulation_required(arch): - pmb.chroot.binfmt.unregister(args, arch) - logging.debug("Shutdown complete") diff --git a/pmb/chroot/user.py b/pmb/chroot/user.py deleted file mode 100644 index 9bad1e9b..00000000 --- a/pmb/chroot/user.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.chroot.root -import pmb.helpers.run -import pmb.helpers.run_core - - -def user(args, cmd, suffix="native", working_dir="/", output="log", - output_return=False, check=None, env={}, auto_init=True): - """ - Run a command inside a chroot as "user". 
We always use the BusyBox - implementation of 'su', because other implementations may override the PATH - environment variable (#1071). - - :param env: dict of environment variables to be passed to the command, e.g. - {"JOBS": "5"} - :param auto_init: automatically initialize the chroot - - See pmb.helpers.run_core.core() for a detailed description of all other - arguments and the return value. - """ - if "HOME" not in env: - env["HOME"] = "/home/pmos" - - flat_cmd = pmb.helpers.run_core.flat_cmd(cmd, env=env) - cmd = ["busybox", "su", "pmos", "-c", flat_cmd] - return pmb.chroot.root(args, cmd, suffix, working_dir, output, - output_return, check, {}, auto_init) - - -def exists(args, username, suffix="native"): - """ - Checks if username exists in the system - - :param username: User name - :returns: bool - """ - output = pmb.chroot.root(args, ["getent", "passwd", username], - suffix, output_return=True, check=False) - return len(output) > 0 diff --git a/pmb/chroot/zap.py b/pmb/chroot/zap.py deleted file mode 100644 index caf6f9f3..00000000 --- a/pmb/chroot/zap.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import logging -import math -import os - -import pmb.chroot -import pmb.config.pmaports -import pmb.config.workdir -import pmb.helpers.pmaports -import pmb.helpers.run -import pmb.parse.apkindex - - -def zap(args, confirm=True, dry=False, pkgs_local=False, http=False, - pkgs_local_mismatch=False, pkgs_online_mismatch=False, distfiles=False, - rust=False, netboot=False): - """ - Shutdown everything inside the chroots (e.g. adb), umount - everything and then safely remove folders from the work-directory. - - :param dry: Only show what would be deleted, do not delete for real - :param pkgs_local: Remove *all* self-compiled packages (!) - :param http: Clear the http cache (used e.g. for the initial apk download) - :param pkgs_local_mismatch: Remove the packages that have - a different version compared to what is in the aports folder. - :param pkgs_online_mismatch: Clean out outdated binary packages - downloaded from mirrors (e.g. from Alpine) - :param distfiles: Clear the downloaded files cache - :param rust: Remove rust related caches - :param netboot: Remove images for netboot - - NOTE: This function gets called in pmb/config/init.py, with only args.work - and args.device set! 
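# Editor's note: usage sketch with the keyword arguments documented above
# (hypothetical invocation) -- only print what would be removed, including
# self-compiled packages and the distfiles cache, without deleting anything:
#   pmb.chroot.zap(args, confirm=False, dry=True, pkgs_local=True,
#                  distfiles=True)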
- """ - # Get current work folder size - if not dry: - pmb.chroot.shutdown(args) - logging.debug("Calculate work folder size") - size_old = pmb.helpers.other.folder_size(args, args.work) - - # Delete packages with a different version compared to aports, - # then re-index - if pkgs_local_mismatch: - zap_pkgs_local_mismatch(args, confirm, dry) - - # Delete outdated binary packages - if pkgs_online_mismatch: - zap_pkgs_online_mismatch(args, confirm, dry) - - pmb.chroot.shutdown(args) - - # Deletion patterns for folders inside args.work - patterns = [ - "chroot_native", - "chroot_buildroot_*", - "chroot_installer_*", - "chroot_rootfs_*", - ] - if pkgs_local: - patterns += ["packages"] - if http: - patterns += ["cache_http"] - if distfiles: - patterns += ["cache_distfiles"] - if rust: - patterns += ["cache_rust"] - if netboot: - patterns += ["images_netboot"] - - # Delete everything matching the patterns - for pattern in patterns: - pattern = os.path.realpath(f"{args.work}/{pattern}") - matches = glob.glob(pattern) - for match in matches: - if (not confirm or - pmb.helpers.cli.confirm(args, f"Remove {match}?")): - logging.info(f"% rm -rf {match}") - if not dry: - pmb.helpers.run.root(args, ["rm", "-rf", match]) - - # Remove config init dates for deleted chroots - pmb.config.workdir.clean(args) - - # Chroots were zapped, so no repo lists exist anymore - pmb.helpers.other.cache["apk_repository_list_updated"].clear() - - # Print amount of cleaned up space - if dry: - logging.info("Dry run: nothing has been deleted") - else: - size_new = pmb.helpers.other.folder_size(args, args.work) - mb = (size_old - size_new) / 1024 - logging.info(f"Cleared up ~{math.ceil(mb)} MB of space") - - -def zap_pkgs_local_mismatch(args, confirm=True, dry=False): - channel = pmb.config.pmaports.read_config(args)["channel"] - if not os.path.exists(f"{args.work}/packages/{channel}"): - return - - question = "Remove binary packages that are newer than the corresponding" \ - f" pmaports (channel '{channel}')?" 
- if confirm and not pmb.helpers.cli.confirm(args, question): - return - - reindex = False - pattern = f"{args.work}/packages/{channel}/*/APKINDEX.tar.gz" - for apkindex_path in glob.glob(pattern): - # Delete packages without same version in aports - blocks = pmb.parse.apkindex.parse_blocks(apkindex_path) - for block in blocks: - pkgname = block["pkgname"] - origin = block["origin"] - version = block["version"] - arch = block["arch"] - - # Apk path - apk_path_short = f"{arch}/{pkgname}-{version}.apk" - apk_path = f"{args.work}/packages/{channel}/{apk_path_short}" - if not os.path.exists(apk_path): - logging.info("WARNING: Package mentioned in index not" - f" found: {apk_path_short}") - continue - - # Aport path - aport_path = pmb.helpers.pmaports.find(args, origin, False) - if not aport_path: - logging.info(f"% rm {apk_path_short}" - f" ({origin} aport not found)") - if not dry: - pmb.helpers.run.root(args, ["rm", apk_path]) - reindex = True - continue - - # Clear out any binary apks that do not match what is in aports - apkbuild = pmb.parse.apkbuild(f"{aport_path}/APKBUILD") - version_aport = f"{apkbuild['pkgver']}-r{apkbuild['pkgrel']}" - if version != version_aport: - logging.info(f"% rm {apk_path_short}" - f" ({origin} aport: {version_aport})") - if not dry: - pmb.helpers.run.root(args, ["rm", apk_path]) - reindex = True - - if reindex: - pmb.build.other.index_repo(args) - - -def zap_pkgs_online_mismatch(args, confirm=True, dry=False): - # Check whether we need to do anything - paths = glob.glob(f"{args.work}/cache_apk_*") - if not len(paths): - return - if (confirm and not pmb.helpers.cli.confirm(args, - "Remove outdated" - " binary packages?")): - return - - # Iterate over existing apk caches - for path in paths: - arch = os.path.basename(path).split("_", 2)[2] - suffix = f"buildroot_{arch}" - if arch == pmb.config.arch_native: - suffix = "native" - - # Clean the cache with apk - logging.info(f"({suffix}) apk -v cache clean") - if not dry: - pmb.chroot.root(args, ["apk", "-v", "cache", "clean"], suffix) diff --git a/pmb/ci/__init__.py b/pmb/ci/__init__.py deleted file mode 100644 index 39d3c61a..00000000 --- a/pmb/ci/__init__.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import collections -import glob -import logging -import os -import shlex -import pmb.chroot -import pmb.helpers.cli - - -def get_ci_scripts(topdir): - """ Find 'pmbootstrap ci'-compatible scripts inside a git repository, and - parse their metadata (description, options). The reference is at: - https://postmarketos.org/pmb-ci - :param topdir: top directory of the git repository, get it with: - pmb.helpers.git.get_topdir() - :returns: a dict of CI scripts found in the git repository, e.g. 
- {"ruff": {"description": "lint all python scripts", - "options": []}, - ...} """ - ret = {} - for script in glob.glob(f"{topdir}/.ci/*.sh"): - is_pmb_ci_script = False - description = "" - options = [] - - with open(script) as handle: - for line in handle: - if line.startswith("# https://postmarketos.org/pmb-ci"): - is_pmb_ci_script = True - elif line.startswith("# Description: "): - description = line.split(": ", 1)[1].rstrip() - elif line.startswith("# Options: "): - options = line.split(": ", 1)[1].rstrip().split(" ") - elif not line.startswith("#"): - # Stop parsing after the block of comments on top - break - - if not is_pmb_ci_script: - continue - - if not description: - logging.error(f"ERROR: {script}: missing '# Description: …' line") - exit(1) - - for option in options: - if option not in pmb.config.ci_valid_options: - raise RuntimeError(f"{script}: unsupported option '{option}'." - " Typo in script or pmbootstrap too old?") - - short_name = os.path.basename(script).split(".", -1)[0] - ret[short_name] = {"description": description, - "options": options} - return ret - - -def sort_scripts_by_speed(scripts): - """ Order the scripts, so fast scripts run before slow scripts. Whether a - script is fast or not is determined by the '# Options: slow' comment in - the file. - :param scripts: return of get_ci_scripts() - :returns: same format as get_ci_scripts(), but as ordered dict with - fast scripts before slow scripts """ - ret = collections.OrderedDict() - - # Fast scripts first - for script_name, script in scripts.items(): - if "slow" in script["options"]: - continue - ret[script_name] = script - - # Then slow scripts - for script_name, script in scripts.items(): - if "slow" not in script["options"]: - continue - ret[script_name] = script - return ret - - -def ask_which_scripts_to_run(scripts_available): - """ Display an interactive prompt about which of the scripts the user - wishes to run, or all of them. - :param scripts_available: same format as get_ci_scripts() - :returns: either full scripts_available (all selected), or a subset """ - count = len(scripts_available.items()) - choices = ["all"] - - logging.info(f"Available CI scripts ({count}):") - for script_name, script in scripts_available.items(): - extra = "" - if "slow" in script["options"]: - extra += " (slow)" - logging.info(f"* {script_name}: {script['description']}{extra}") - choices += [script_name] - - selection = pmb.helpers.cli.ask("Which script?", None, "all", - complete=choices) - if selection == "all": - return scripts_available - - ret = {} - ret[selection] = scripts_available[selection] - return ret - - -def copy_git_repo_to_chroot(args, topdir): - """ Create a tarball of the git repo (including unstaged changes and new - files) and extract it in chroot_native. 
- :param topdir: top directory of the git repository, get it with: - pmb.helpers.git.get_topdir() """ - pmb.chroot.init(args) - tarball_path = f"{args.work}/chroot_native/tmp/git.tar.gz" - files = pmb.helpers.git.get_files(args, topdir) - - with open(f"{tarball_path}.files", "w") as handle: - for file in files: - handle.write(file) - handle.write("\n") - - pmb.helpers.run.user(args, ["tar", "-cf", tarball_path, "-T", - f"{tarball_path}.files"], topdir) - - ci_dir = "/home/pmos/ci" - pmb.chroot.user(args, ["rm", "-rf", ci_dir]) - pmb.chroot.user(args, ["mkdir", ci_dir]) - pmb.chroot.user(args, ["tar", "-xf", "/tmp/git.tar.gz"], - working_dir=ci_dir) - - -def run_scripts(args, topdir, scripts): - """ Run one of the given scripts after another, either natively or in a - chroot. Display a progress message and stop on error (without printing - a python stack trace). - :param topdir: top directory of the git repository, get it with: - pmb.helpers.git.get_topdir() - :param scripts: return of get_ci_scripts() """ - steps = len(scripts) - step = 0 - repo_copied = False - - for script_name, script in scripts.items(): - step += 1 - - where = "pmbootstrap chroot" - if "native" in script["options"]: - where = "native" - - script_path = f".ci/{script_name}.sh" - logging.info(f"*** ({step}/{steps}) RUNNING CI SCRIPT: {script_path}" - f" [{where}] ***") - - if "native" in script["options"]: - rc = pmb.helpers.run.user(args, [script_path], topdir, - output="tui") - continue - else: - # Run inside pmbootstrap chroot - if not repo_copied: - copy_git_repo_to_chroot(args, topdir) - repo_copied = True - - env = {"TESTUSER": "pmos"} - rc = pmb.chroot.root(args, [script_path], check=False, env=env, - working_dir="/home/pmos/ci", - output="tui") - if rc: - logging.error(f"ERROR: CI script failed: {script_name}") - exit(1) diff --git a/pmb/config/__init__.py b/pmb/config/__init__.py deleted file mode 100644 index 29557c98..00000000 --- a/pmb/config/__init__.py +++ /dev/null @@ -1,1156 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import multiprocessing -import os -import pmb.parse.arch -import sys -from typing import List - -# -# Exported functions -# -from pmb.config.load import load -from pmb.config.save import save -from pmb.config.merge_with_args import merge_with_args -from pmb.config.sudo import which_sudo - - -# -# Exported variables (internal configuration) -# -pmb_src = os.path.normpath(os.path.realpath(__file__) + "/../../..") -apk_keys_path = pmb_src + "/pmb/data/keys" -arch_native = pmb.parse.arch.alpine_native() - -# apk-tools minimum version -# https://pkgs.alpinelinux.org/packages?name=apk-tools&branch=edge -# Update this frequently to prevent a MITM attack with an outdated version -# (which may contain a vulnerable apk/openssl, and allows an attacker to -# exploit the system!) -apk_tools_min_version = {"edge": "2.14.0-r5", - "v3.19": "2.14.0-r5", - "v3.18": "2.14.0-r2", - "v3.17": "2.12.10-r1", - "v3.16": "2.12.9-r3", - "v3.15": "2.12.7-r3", - "v3.14": "2.12.7-r0", - "v3.13": "2.12.7-r0", - "v3.12": "2.10.8-r1"} - -# postmarketOS aports compatibility (checked against "version" in pmaports.cfg) -pmaports_min_version = "7" - -# Version of the work folder (as asked during 'pmbootstrap init'). Increase -# this number, whenever migration is required and provide the migration code, -# see migrate_work_folder()). -work_version = 6 - -# Minimum required version of postmarketos-ondev (pmbootstrap install --ondev). 
-# Try to support the current versions of all channels (edge, v21.03). When -# bumping > 0.4.0, remove compat code in pmb/install/_install.py (search for -# get_ondev_pkgver). -ondev_min_version = "0.2.0" - -# Programs that pmbootstrap expects to be available from the host system. Keep -# in sync with README.md, and try to keep the list as small as possible. The -# idea is to run almost everything in Alpine chroots. -required_programs = [ - "git", - "openssl", - "ps", - "tar", -] - - -def sudo(cmd: List[str]) -> List[str]: - """Adapt a command to run as root.""" - sudo = which_sudo() - if sudo: - return [sudo, *cmd] - else: - return cmd - - -# Keys saved in the config file (mostly what we ask in 'pmbootstrap init') -config_keys = [ - "aports", - "boot_size", - "build_default_device_arch", - "build_pkgs_on_install", - "ccache_size", - "device", - "extra_packages", - "extra_space", - "hostname", - "is_default_channel", - "jobs", - "kernel", - "keymap", - "locale", - "mirror_alpine", - "mirrors_postmarketos", - "nonfree_firmware", - "nonfree_userland", - "qemu_redir_stdio", - "ssh_key_glob", - "ssh_keys", - "sudo_timer", - "timezone", - "ui", - "ui_extras", - "user", - "work", -] - -# Config file/commandline default values -# $WORK gets replaced with the actual value for args.work (which may be -# overridden on the commandline) -defaults = { - "aports": "$WORK/cache_git/pmaports", - "ccache_size": "5G", - "is_default_channel": True, - "cipher": "aes-xts-plain64", - "config": (os.environ.get('XDG_CONFIG_HOME') or - os.path.expanduser("~/.config")) + "/pmbootstrap.cfg", - "device": "qemu-amd64", - "extra_packages": "none", - "fork_alpine": False, - "hostname": "", - "build_pkgs_on_install": True, - # A higher value is typically desired, but this can lead to VERY long open - # times on slower devices due to host systems being MUCH faster than the - # target device (see issue #429). - "iter_time": "200", - "jobs": str(multiprocessing.cpu_count() + 1), - "kernel": "stable", - "keymap": "", - "locale": "en_US.UTF-8", - "log": "$WORK/log.txt", - "mirror_alpine": "http://dl-cdn.alpinelinux.org/alpine/", - # NOTE: mirrors_postmarketos variable type is supposed to be - # comma-separated string, not a python list or any other type! - "mirrors_postmarketos": "http://mirror.postmarketos.org/postmarketos/", - "nonfree_firmware": True, - "nonfree_userland": False, - "ssh_keys": False, - "ssh_key_glob": "~/.ssh/id_*.pub", - "timezone": "GMT", - "ui": "console", - "ui_extras": False, - "user": "user", - "work": os.path.expanduser("~") + "/.local/var/pmbootstrap", - "boot_size": "256", - "extra_space": "0", - "sudo_timer": False, - "qemu_redir_stdio": False, - "build_default_device_arch": False, -} - - -# Whether we're connected to a TTY (which allows things like e.g. 
printing -# progress bars) -is_interactive = sys.stdout.isatty() and \ - sys.stderr.isatty() and \ - sys.stdin.isatty() - - -# ANSI escape codes to highlight stdout -styles = { - "BLUE": '\033[94m', - "BOLD": '\033[1m', - "GREEN": '\033[92m', - "RED": '\033[91m', - "YELLOW": '\033[93m', - "END": '\033[0m' -} - -if "NO_COLOR" in os.environ: - for style in styles.keys(): - styles[style] = "" - -# Supported filesystems and their fstools packages -filesystems = {"btrfs": "btrfs-progs", - "ext2": "e2fsprogs", - "ext4": "e2fsprogs", - "f2fs": "f2fs-tools", - "fat16": "dosfstools", - "fat32": "dosfstools"} - -# Legacy channels and their new names (pmb#2015) -pmaports_channels_legacy = {"stable": "v20.05", - "stable-next": "v21.03"} -# -# CHROOT -# - -# Usually the ID for the first user created is 1000. However, we want -# pmbootstrap to work even if the 'user' account inside the chroots has -# another UID, so we force it to be different. -chroot_uid_user = "12345" - -# The PATH variable used inside all chroots -chroot_path = ":".join([ - "/usr/lib/ccache/bin", - "/usr/local/sbin", - "/usr/local/bin", - "/usr/sbin:/usr/bin", - "/sbin", - "/bin" -]) - -# The PATH variable used on the host, to find the "chroot" and "sh" -# executables. As pmbootstrap runs as user, not as root, the location -# for the chroot executable may not be in the PATH (Debian). -chroot_host_path = os.environ["PATH"] + ":/usr/sbin/" - -# Folders that get mounted inside the chroot -# $WORK gets replaced with args.work -# $ARCH gets replaced with the chroot architecture (eg. x86_64, armhf) -# $CHANNEL gets replaced with the release channel (e.g. edge, v21.03) -# Use no more than one dir after /mnt/pmbootstrap, see remove_mnt_pmbootstrap. -chroot_mount_bind = { - "/proc": "/proc", - "$WORK/cache_apk_$ARCH": "/var/cache/apk", - "$WORK/cache_appstream/$ARCH/$CHANNEL": "/mnt/appstream-data", - "$WORK/cache_ccache_$ARCH": "/mnt/pmbootstrap/ccache", - "$WORK/cache_distfiles": "/var/cache/distfiles", - "$WORK/cache_git": "/mnt/pmbootstrap/git", - "$WORK/cache_go": "/mnt/pmbootstrap/go", - "$WORK/cache_rust": "/mnt/pmbootstrap/rust", - "$WORK/config_abuild": "/mnt/pmbootstrap/abuild-config", - "$WORK/config_apk_keys": "/etc/apk/keys", - "$WORK/cache_sccache": "/mnt/pmbootstrap/sccache", - "$WORK/images_netboot": "/mnt/pmbootstrap/netboot", - "$WORK/packages/$CHANNEL": "/mnt/pmbootstrap/packages", -} - -# Building chroots (all chroots, except for the rootfs_ chroot) get symlinks in -# the "pmos" user's home folder pointing to mountfolders from above. -# Rust packaging is new and still a bit weird in Alpine and postmarketOS. As of -# writing, we only have one package (squeekboard), and use cargo to download -# the source of all dependencies at build time and compile it. Usually, this is -# a no-go, but at least until this is resolved properly, let's cache the -# dependencies and downloads as suggested in "Caching the Cargo home in CI": -# https://doc.rust-lang.org/cargo/guide/cargo-home.html -# Go: cache the directories "go env GOMODCACHE" and "go env GOCACHE" point to, -# to avoid downloading dependencies over and over (GOMODCACHE, similar to the -# rust depends caching described above) and to cache build artifacts (GOCACHE, -# similar to ccache). 
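# Editor's note: worked example (hypothetical values) of how the $WORK/$ARCH/
# $CHANNEL placeholders in chroot_mount_bind below get expanded by
# pmb.chroot.mount.mount() shown earlier:
work, arch, channel = "/home/user/.local/var/pmbootstrap", "aarch64", "edge"
source = "$WORK/packages/$CHANNEL".replace("$WORK", work) \
    .replace("$ARCH", arch).replace("$CHANNEL", channel)
target_full = f"{work}/chroot_buildroot_{arch}/mnt/pmbootstrap/packages"
# -> bind mount /home/user/.local/var/pmbootstrap/packages/edge onto
#    .../chroot_buildroot_aarch64/mnt/pmbootstrap/packages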
-chroot_home_symlinks = { - "/mnt/pmbootstrap/abuild-config": "/home/pmos/.abuild", - "/mnt/pmbootstrap/ccache": "/home/pmos/.ccache", - "/mnt/pmbootstrap/go/gocache": "/home/pmos/.cache/go-build", - "/mnt/pmbootstrap/go/gomodcache": "/home/pmos/go/pkg/mod", - "/mnt/pmbootstrap/packages": "/home/pmos/packages/pmos", - "/mnt/pmbootstrap/rust/git/db": "/home/pmos/.cargo/git/db", - "/mnt/pmbootstrap/rust/registry/cache": "/home/pmos/.cargo/registry/cache", - "/mnt/pmbootstrap/rust/registry/index": "/home/pmos/.cargo/registry/index", - "/mnt/pmbootstrap/sccache": "/home/pmos/.cache/sccache", -} - -# Device nodes to be created in each chroot. Syntax for each entry: -# [permissions, type, major, minor, name] -chroot_device_nodes = [ - [666, "c", 1, 3, "null"], - [666, "c", 1, 5, "zero"], - [666, "c", 1, 7, "full"], - [644, "c", 1, 8, "random"], - [644, "c", 1, 9, "urandom"], -] - -# Age in hours that we keep the APKINDEXes before downloading them again. -# You can force-update them with 'pmbootstrap update'. -apkindex_retention_time = 4 - - -# When chroot is considered outdated (in seconds) -chroot_outdated = 3600 * 24 * 2 - -# -# BUILD -# -# Officially supported host/target architectures for postmarketOS. Only -# specify architectures supported by Alpine here. For cross-compiling, -# we need to generate the "musl-$ARCH" and "gcc-$ARCH" packages (use -# "pmbootstrap aportgen musl-armhf" etc.). -build_device_architectures = ["armhf", "armv7", "aarch64", "x86_64", "x86", "riscv64"] - -# Packages that will be installed in a chroot before it builds packages -# for the first time -build_packages = ["abuild", "build-base", "ccache", "git"] - -# -# KCONFIG CHECK -# -# Implemented value types: -# - boolean (e.g. '"ANDROID_PARANOID_NETWORK": False'): -# - False: disabled -# - True: enabled, either as module or built-in -# - array (e.g. '"ANDROID_BINDER_DEVICES": ["binder", "hwbinder"]'): -# - each element of the array must be contained in the kernel config string, -# in any order. The example above would accept the following in the config: -# CONFIG_ANDROID_BINDER_DEVICES="hwbinder,vndbinder,binder" -# - string (e.g. '"LSM": "lockdown,yama,loadpin,safesetid,integrity"'): -# - the value in the kernel config must be the same as the given string. Use -# this e.g. if the order of the elements is important. 
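The three value types described above need only a small amount of checking logic. The following is a minimal, self-contained sketch of how such a check could look; it is illustrative only and does not reproduce the real pmb.parse.kconfig code. The function name and the assumption that the parsed config maps option names (without the CONFIG_ prefix) to their raw values with surrounding quotes stripped are made up for this example.

    def check_option(parsed_config, option, expected):
        """Check one option against the value types described above.

        :param parsed_config: dict mapping option names to values, e.g.
            {"EXT4_FS": "y", "ANDROID_BINDER_DEVICES": "binder,hwbinder"}
            (assumed: CONFIG_ prefix and quotes already stripped)
        :param expected: False/True (boolean), list of str (array), or str
        :returns: True if the kernel config satisfies the requirement
        """
        value = parsed_config.get(option)
        if expected is False:
            # Disabled: not set at all, or explicitly set to "n"
            return value is None or value == "n"
        if expected is True:
            # Enabled: either built-in ("y") or module ("m")
            return value in ("y", "m")
        if isinstance(expected, list):
            # Array: each element must be contained in the config string,
            # in any order
            return value is not None and all(e in value for e in expected)
        # String: the value must match exactly (use when order matters)
        return value == expected

    # Example: accepted, because all required binder devices are listed
    config = {"ANDROID_BINDER_DEVICES": "hwbinder,vndbinder,binder"}
    assert check_option(config, "ANDROID_BINDER_DEVICES",
                        ["binder", "hwbinder"])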
- -# Necessary kernel config options -kconfig_options = { - ">=0.0.0": { # all versions - "all": { # all arches - "ANDROID_PARANOID_NETWORK": False, - "BLK_DEV_INITRD": True, - "CGROUPS": True, - "CRYPTO_AES": True, - "CRYPTO_XTS": True, - "DEVTMPFS": True, - "DM_CRYPT": True, - "INPUT_EVDEV": True, - "EXT4_FS": True, - "KINETO_GAN": False, - "PFT": False, - "SEC_RESTRICT_ROOTING": False, - "SYSVIPC": True, - "TMPFS_POSIX_ACL": True, - "USE_VFB": False, - "VT": True, - } - }, - ">=2.6.0": { - "all": { - "BINFMT_ELF": True, - }, - }, - ">=3.10.0": { - "all": { - "BINFMT_SCRIPT": True, - }, - }, - ">=4.0.0": { - "all": { - "UEVENT_HELPER": True, - "USER_NS": True, - }, - }, - "<4.7.0": { - "all": { - "DEVPTS_MULTIPLE_INSTANCES": True, - } - }, - "<4.14.0": { - "all": { - "SAMSUNG_TUI": False, - "TZDEV": False, - } - }, - "<5.2.0": { - "armhf armv7 x86": { - "LBDAF": True - } - } -} - -# Necessary waydroid kernel config options (android app support) -kconfig_options_waydroid = { - ">=0.0.0": { # all versions - "all": { # all arches - "ANDROID_BINDERFS": False, - "ANDROID_BINDER_DEVICES": ["binder", "hwbinder", "vndbinder"], - "ANDROID_BINDER_IPC": True, - "ANDROID_BINDER_IPC_SELFTEST": False, - "BLK_DEV_LOOP": True, - "BPF_SYSCALL": True, - "BRIDGE": True, - "BRIDGE_VLAN_FILTERING": True, - "CGROUP_BPF": True, - "FUSE_FS": True, - "IP_NF_MANGLE": True, - "NETFILTER_XTABLES": True, - "NETFILTER_XT_MATCH_COMMENT": True, - "PSI": True, - "PSI_DEFAULT_DISABLED": False, - "SQUASHFS": True, - "SQUASHFS_XATTR": True, - "SQUASHFS_XZ": True, - "TMPFS_XATTR": True, - "TUN": True, - "VETH": True, - "VLAN_8021Q": True, # prerequisite for bridge - } - }, - ">=3.5": { - "all": { - "CROSS_MEMORY_ATTACH": True, - } - }, - ">=4.20.0": { - "all": { - "PSI": True, # required by userspace OOM killer - "PSI_DEFAULT_DISABLED": False, - } - }, - "<5.18": { # option has been dropped - "all": { - "ASHMEM": True, - } - } -} - -# Necessary iwd kernel config options (inet wireless daemon) -# Obtained from 'grep ADD_MISSING src/main.c' in iwd.git -kconfig_options_iwd = { - ">=0.0.0": { # all versions - "all": { # all arches - "ASYMMETRIC_KEY_TYPE": True, - "ASYMMETRIC_PUBLIC_KEY_SUBTYPE": True, - "CRYPTO_AES": True, - "CRYPTO_CBC": True, - "CRYPTO_CMAC": True, - "CRYPTO_DES": True, - "CRYPTO_ECB": True, - "CRYPTO_HMAC": True, - "CRYPTO_MD5": True, - "CRYPTO_SHA1": True, - "CRYPTO_SHA256": True, - "CRYPTO_SHA512": True, - "CRYPTO_USER_API_HASH": True, - "CRYPTO_USER_API_SKCIPHER": True, - "KEYS": True, - "KEY_DH_OPERATIONS": True, - "PKCS7_MESSAGE_PARSER": True, - "PKCS8_PRIVATE_KEY_PARSER": True, - "X509_CERTIFICATE_PARSER": True, - "RFKILL": True, - }, - }, -} - -# Necessary nftables kernel config options (firewall) -kconfig_options_nftables = { - ">=3.13.0": { # nftables support introduced here - "all": { # all arches - "NETFILTER": True, - "NF_CONNTRACK": True, - "NF_TABLES": True, - "NF_TABLES_INET": True, - "NFT_CT": True, - "NFT_LOG": True, - "NFT_LIMIT": True, - "NFT_MASQ": True, - "NFT_NAT": True, - "NFT_REJECT": True, - "NF_TABLES_IPV4": True, - "NF_REJECT_IPV4": True, - "IP_NF_IPTABLES": True, - "IP_NF_FILTER": True, - "IP_NF_TARGET_REJECT": True, - "IP_NF_NAT": True, - "NF_TABLES_IPV6": True, - "NF_REJECT_IPV6": True, - "IP6_NF_IPTABLES": True, - "IP6_NF_FILTER": True, - "IP6_NF_TARGET_REJECT": True, - "IP6_NF_NAT": True, - } - }, - ">=3.13.0 <5.17": { # option has been dropped - "all": { # all arches - "NFT_COUNTER": True, - }, - }, -} - -# Necessary kernel config options for containers (lxc, Docker) 
-kconfig_options_containers = { - ">=0.0.0": { # all versions, more specifically - since >=2.5~2.6 - "all": { # all arches - "NAMESPACES": True, - "NET_NS": True, - "PID_NS": True, - "IPC_NS": True, - "UTS_NS": True, - "CGROUPS": True, - "CGROUP_CPUACCT": True, - "CGROUP_DEVICE": True, - "CGROUP_FREEZER": True, - "CGROUP_SCHED": True, - "CPUSETS": True, - "KEYS": True, - "VETH": True, - "BRIDGE": True, # (also needed for waydroid) - "BRIDGE_NETFILTER": True, - "IP_NF_FILTER": True, - "IP_NF_TARGET_MASQUERADE": True, - "NETFILTER_XT_MATCH_ADDRTYPE": True, - "NETFILTER_XT_MATCH_CONNTRACK": True, - "NETFILTER_XT_MATCH_IPVS": True, - "NETFILTER_XT_MARK": True, - "NETFILTER_XT_TARGET_CHECKSUM": True, # Needed for lxc - "IP_NF_NAT": True, - "NF_NAT": True, - "POSIX_MQUEUE": True, - "BLK_DEV_DM": True, # Storage Drivers - "DUMMY": True, # Network Drivers - # "USER_NS": True, # This is already in pmOS kconfig check - "BLK_CGROUP": True, # Optional section - "BLK_DEV_THROTTLING": True, # Optional section - "CGROUP_PERF": True, # Optional section - "NET_CLS_CGROUP": True, # Optional section - "FAIR_GROUP_SCHED": True, # Optional section - "RT_GROUP_SCHED": True, # Optional section - "IP_NF_TARGET_REDIRECT": True, # Optional section - "IP_VS": True, # Optional section - "IP_VS_NFCT": True, # Optional section - "IP_VS_PROTO_TCP": True, # Optional section - "IP_VS_PROTO_UDP": True, # Optional section - "IP_VS_RR": True, # Optional section - # "EXT4_FS": True, # This is already in pmOS kconfig check - "EXT4_FS_POSIX_ACL": True, # Optional section - "EXT4_FS_SECURITY": True, # Optional section - } - }, - ">=3.2": { - "all": { - "CFS_BANDWIDTH": True, # Optional section - } - }, - ">=3.3": { - "all": { # all arches - "CHECKPOINT_RESTORE": True, # Needed for lxc - } - }, - ">=3.6": { - "all": { # all arches - "MEMCG": True, - "DM_THIN_PROVISIONING": True, # Storage Drivers - "SWAP": True, - }, - "x86 x86_64": { # only for x86, x86_64 (and sparc64, ia64) - "HUGETLB_PAGE": True, - "CGROUP_HUGETLB": True, # Optional section - } - }, - ">=3.6 <6.1_rc1": { # option has been dropped - "all": { - "MEMCG_SWAP": True, - } - }, - ">=3.7 <5.0": { - "all": { - "NF_NAT_IPV4": True, # Needed for lxc - "NF_NAT_IPV6": True, # Needed for lxc - }, - }, - ">=3.7": { - "all": { # all arches - "VXLAN": True, # Network Drivers - "IP6_NF_TARGET_MASQUERADE": True, # Needed for lxc - } - }, - ">=3.9": { - "all": { # all arches - "BRIDGE_VLAN_FILTERING": True, # Network Drivers (also for waydroid) - "MACVLAN": True, # Network Drivers - } - }, - ">=3.14": { - "all": { # all arches - "CGROUP_NET_PRIO": True, # Optional section - } - }, - ">=3.18": { - "all": { # all arches - "OVERLAY_FS": True, # Storage Drivers - } - }, - ">=3.19": { - "all": { # all arches - "IPVLAN": True, # Network Drivers - "SECCOMP": True, # Optional section - } - }, - ">=4.4": { - "all": { # all arches - "CGROUP_PIDS": True, # Optional section - } - }, -} - -# Necessary zram kernel config options (RAM disk with on-the-fly compression) -kconfig_options_zram = { - ">=3.14.0": { # zram support introduced here - "all": { # all arches - "ZRAM": True, - "ZSMALLOC": True, - "CRYPTO_LZ4": True, - "LZ4_COMPRESS": True, - "SWAP": True, - } - }, -} - -# Necessary netboot kernel config options -kconfig_options_netboot = { - ">=0.0.0": { # all versions - "all": { # all arches - "BLK_DEV_NBD": True, - } - }, -} - -# Necessary wireguard & wg-quick kernel config options -# From 
https://gitweb.gentoo.org/repo/gentoo.git/tree/net-vpn/wireguard-tools/wireguard-tools-1.0.20210914.ebuild?id=76aaa1eeb6f001baaa68e6946f917ebb091bbd9d # noqa -kconfig_options_wireguard = { - ">=5.6_rc1": { # all versions - "all": { # all arches - "WIREGUARD": True, - "IP_ADVANCED_ROUTER": True, - "IP_MULTIPLE_TABLES": True, - "IPV6_MULTIPLE_TABLES": True, - "NF_TABLES": True, - "NF_TABLES_IPV4": True, - "NF_TABLES_IPV6": True, - "NFT_CT": True, - "NFT_FIB": True, - "NFT_FIB_IPV4": True, - "NFT_FIB_IPV6": True, - "NF_CONNTRACK_MARK": True, - }, - }, -} - -# Necessary file system config options -kconfig_options_filesystems = { - ">=0.0.0": { # all versions - "all": { # all arches - "BTRFS_FS": True, - "EXFAT_FS": True, - "EXT4_FS": True, - "F2FS_FS": True, - }, - }, -} - -kconfig_options_usb_gadgets = { - ">=0.0.0": { # all versions - "all": { # all arches - # disable legacy gadgets - "USB_ETH": False, - "USB_FUNCTIONFS": False, - "USB_MASS_STORAGE": False, - "USB_G_SERIAL": False, - # enable configfs gadgets - "USB_CONFIGFS_NCM": True, # USB networking via NCM - "USB_CONFIGFS_RNDIS": True, # USB networking via RNDIS (legacy) - }, - }, -} - -# Various other kernel config options -kconfig_options_community = { - ">=0.0.0": { # all versions - "all": { # all arches - "INPUT_UINPUT": True, # buffyboard - "LEDS_TRIGGER_TIMER": True, # hfd-service - "NETFILTER_XT_MATCH_TCPMSS": True, # change MTU, e.g. for Wireguard - "NETFILTER_XT_TARGET_TCPMSS": True, # change MTU, e.g. for Wireguard - }, - }, -} - -# Necessary UEFI boot config options -kconfig_options_uefi = { - ">=0.0.0": { # all versions - "all": { # all arches - "EFI_STUB": True, - "EFI": True, - "DMI": True, - "EFI_ESRT": True, - "EFI_VARS_PSTORE": True, - "EFI_PARAMS_FROM_FDT": True, - "EFI_RUNTIME_WRAPPERS": True, - "EFI_GENERIC_STUB": True, - }, - "x86_64": { - "EFI_MIXED": True, - }, - }, - ">=6.1.0": { - "aarch64": { - # Required EFI booting compressed kernels on this arch - "EFI_ZBOOT": True, - }, - }, -} - -# -# PARSE -# - -# Variables belonging to a package or subpackage in APKBUILD files -apkbuild_package_attributes = { - "pkgdesc": {}, - "depends": {"array": True}, - "provides": {"array": True}, - "provider_priority": {"int": True}, - "install": {"array": True}, - "triggers": {"array": True}, - - # Packages can specify soft dependencies in "_pmb_recommends" to be - # explicitly installed by default, and not implicitly as a hard dependency - # of the package ("depends"). This makes these apps uninstallable, without - # removing the meta-package. (#1933). To disable this feature, use: - # "pmbootstrap install --no-recommends". - "_pmb_recommends": {"array": True}, - - # UI meta-packages can specify groups to which the user must be added - # to access specific hardware such as LED indicators. - "_pmb_groups": {"array": True}, - - # postmarketos-base, UI and device packages can use _pmb_select to provide - # additional configuration options in "pmbootstrap init" that allow - # selecting alternative providers for a virtual APK package. 
- "_pmb_select": {"array": True}, -} - -# Variables in APKBUILD files that get parsed -apkbuild_attributes = { - **apkbuild_package_attributes, - - "arch": {"array": True}, - "depends_dev": {"array": True}, - "makedepends": {"array": True}, - "checkdepends": {"array": True}, - "options": {"array": True}, - "triggers": {"array": True}, - "pkgname": {}, - "pkgrel": {}, - "pkgver": {}, - "subpackages": {}, - "url": {}, - - # cross-compilers - "makedepends_build": {"array": True}, - "makedepends_host": {"array": True}, - - # kernels - "_flavor": {}, - "_device": {}, - "_kernver": {}, - "_outdir": {}, - "_config": {}, - - # linux-edge - "_depends_dev": {"array": True}, - - # mesa - "_llvmver": {}, - - # Overridden packages - "_pkgver": {}, - "_pkgname": {}, - - # git commit - "_commit": {}, - "source": {"array": True}, - - # gcc - "_pkgbase": {}, - "_pkgsnap": {} -} - -# Reference: https://postmarketos.org/apkbuild-options -apkbuild_custom_valid_options = [ - "!pmb:crossdirect", - "!pmb:kconfigcheck", - "pmb:kconfigcheck-community", - "pmb:kconfigcheck-containers", - "pmb:kconfigcheck-iwd", - "pmb:kconfigcheck-netboot", - "pmb:kconfigcheck-nftables", - "pmb:kconfigcheck-uefi", - "pmb:kconfigcheck-waydroid", - "pmb:kconfigcheck-zram", - "pmb:cross-native", - "pmb:gpu-accel", - "pmb:strict", -] - -# Variables from deviceinfo. Reference: -deviceinfo_attributes = [ - # general - "format_version", - "name", - "manufacturer", - "codename", - "year", - "dtb", - "arch", - - # device - "chassis", - "keyboard", - "external_storage", - "screen_width", - "screen_height", - "dev_touchscreen", - "dev_touchscreen_calibration", - "append_dtb", - - # bootloader - "flash_method", - "boot_filesystem", - - # flash - "flash_heimdall_partition_kernel", - "flash_heimdall_partition_initfs", - "flash_heimdall_partition_rootfs", - "flash_heimdall_partition_system", # deprecated - "flash_heimdall_partition_vbmeta", - "flash_heimdall_partition_dtbo", - "flash_fastboot_partition_kernel", - "flash_fastboot_partition_rootfs", - "flash_fastboot_partition_system", # deprecated - "flash_fastboot_partition_vbmeta", - "flash_fastboot_partition_dtbo", - "flash_rk_partition_kernel", - "flash_rk_partition_rootfs", - "flash_rk_partition_system", # deprecated - "flash_mtkclient_partition_kernel", - "flash_mtkclient_partition_rootfs", - "flash_mtkclient_partition_vbmeta", - "flash_mtkclient_partition_dtbo", - "generate_legacy_uboot_initfs", - "kernel_cmdline", - "generate_bootimg", - "header_version", - "bootimg_qcdt", - "bootimg_mtk_mkimage", # deprecated - "bootimg_mtk_label_kernel", - "bootimg_mtk_label_ramdisk", - "bootimg_dtb_second", - "bootimg_custom_args", - "flash_offset_base", - "flash_offset_dtb", - "flash_offset_kernel", - "flash_offset_ramdisk", - "flash_offset_second", - "flash_offset_tags", - "flash_pagesize", - "flash_fastboot_max_size", - "flash_sparse", - "flash_sparse_samsung_format", - "rootfs_image_sector_size", - "sd_embed_firmware", - "sd_embed_firmware_step_size", - "partition_blacklist", - "boot_part_start", - "partition_type", - "root_filesystem", - "flash_kernel_on_update", - "cgpt_kpart", - "cgpt_kpart_start", - "cgpt_kpart_size", - - # weston - "weston_pixman_type", - - # keymaps - "keymaps", -] - -# Valid types for the 'chassis' attribute in deviceinfo -# See https://www.freedesktop.org/software/systemd/man/machine-info.html -deviceinfo_chassis_types = [ - "desktop", - "laptop", - "convertible", - "server", - "tablet", - "handset", - "watch", - "embedded", - "vm" -] - -# -# INITFS -# -initfs_hook_prefix = 
"postmarketos-mkinitfs-hook-" -default_ip = "172.16.42.1" - - -# -# INSTALL -# - -# Packages that will be installed inside the native chroot to perform -# the installation to the device. -# util-linux: losetup, fallocate -install_native_packages = ["cryptsetup", "util-linux", "parted"] -install_device_packages = ["postmarketos-base"] - -# -# FLASH -# - -flash_methods = [ - "0xffff", - "fastboot", - "heimdall", - "mtkclient", - "none", - "rkdeveloptool", - "uuu", -] - -# These folders will be mounted at the same location into the native -# chroot, before the flash programs get started. -flash_mount_bind = [ - "/sys/bus/usb/devices/", - "/sys/dev/", - "/sys/devices/", - "/dev/bus/usb/" -] - -""" -Flasher abstraction. Allowed variables: - -$BOOT: Path to the /boot partition -$DTB: Set to "-dtb" if deviceinfo_append_dtb is set, otherwise "" -$FLAVOR: Backwards compatibility with old mkinitfs (pma#660) -$IMAGE: Path to the combined boot/rootfs image -$IMAGE_SPLIT_BOOT: Path to the (split) boot image -$IMAGE_SPLIT_ROOT: Path to the (split) rootfs image -$PARTITION_KERNEL: Partition to flash the kernel/boot.img to -$PARTITION_ROOTFS: Partition to flash the rootfs to - -Fastboot specific: $KERNEL_CMDLINE -Heimdall specific: $PARTITION_INITFS -uuu specific: $UUU_SCRIPT -""" -flashers = { - "fastboot": { - "depends": [], # pmaports.cfg: supported_fastboot_depends - "actions": { - "list_devices": [["fastboot", "devices", "-l"]], - "flash_rootfs": [["fastboot", "flash", "$PARTITION_ROOTFS", - "$IMAGE"]], - "flash_kernel": [["fastboot", "flash", "$PARTITION_KERNEL", - "$BOOT/boot.img$FLAVOR"]], - "flash_vbmeta": [ - # Generate vbmeta image with "disable verification" flag - ["avbtool", "make_vbmeta_image", "--flags", "2", - "--padding_size", "$FLASH_PAGESIZE", - "--output", "/vbmeta.img"], - ["fastboot", "flash", "$PARTITION_VBMETA", "/vbmeta.img"], - ["rm", "-f", "/vbmeta.img"] - ], - "flash_dtbo": [["fastboot", "flash", "$PARTITION_DTBO", - "$BOOT/dtbo.img"]], - "boot": [["fastboot", "--cmdline", "$KERNEL_CMDLINE", - "boot", "$BOOT/boot.img$FLAVOR"]], - "flash_lk2nd": [["fastboot", "flash", "$PARTITION_KERNEL", - "$BOOT/lk2nd.img"]] - }, - }, - # Some devices provide Fastboot but using Android boot images is not - # practical for them (e.g. because they support booting from FAT32 - # partitions directly and/or the Android boot partition is too small). - # This can be implemented using --split (separate image files for boot and - # rootfs). - # This flasher allows flashing the split image files using Fastboot. - "fastboot-bootpart": { - "split": True, - "depends": ["android-tools"], - "actions": { - "list_devices": [["fastboot", "devices", "-l"]], - "flash_rootfs": [["fastboot", "flash", "$PARTITION_ROOTFS", - "$IMAGE_SPLIT_ROOT"]], - "flash_kernel": [["fastboot", "flash", "$PARTITION_KERNEL", - "$IMAGE_SPLIT_BOOT"]], - # TODO: Add support for boot - }, - }, - # Some Samsung devices need the initramfs to be baked into the kernel (e.g. - # i9070, i9100). We want the initramfs to be generated after the kernel was - # built, so we put the real initramfs on another partition (e.g. RECOVERY) - # and load it from the initramfs in the kernel. This method is called - # "isorec" (isolated recovery), a term coined by Lanchon. 
- "heimdall-isorec": { - "depends": ["heimdall"], - "actions": { - "list_devices": [["heimdall", "detect"]], - "flash_rootfs": [ - ["heimdall_wait_for_device.sh"], - ["heimdall", "flash", "--$PARTITION_ROOTFS", "$IMAGE"]], - "flash_kernel": [["heimdall_flash_kernel.sh", - "$BOOT/initramfs$FLAVOR", "$PARTITION_INITFS", - "$BOOT/vmlinuz$FLAVOR$DTB", - "$PARTITION_KERNEL"]] - }, - }, - # Some Samsung devices need a 'boot.img' file, just like the one generated for - # fastboot compatible devices. Example: s7562, n7100 - "heimdall-bootimg": { - "depends": [], # pmaports.cfg: supported_heimdall_depends - "actions": { - "list_devices": [["heimdall", "detect"]], - "flash_rootfs": [ - ["heimdall_wait_for_device.sh"], - ["heimdall", "flash", "--$PARTITION_ROOTFS", "$IMAGE", - "$NO_REBOOT", "$RESUME"]], - "flash_kernel": [ - ["heimdall_wait_for_device.sh"], - ["heimdall", "flash", "--$PARTITION_KERNEL", - "$BOOT/boot.img$FLAVOR", "$NO_REBOOT", "$RESUME"]], - "flash_vbmeta": [ - ["avbtool", "make_vbmeta_image", "--flags", "2", - "--padding_size", "$FLASH_PAGESIZE", - "--output", "/vbmeta.img"], - ["heimdall", "flash", "--$PARTITION_VBMETA", "/vbmeta.img", - "$NO_REBOOT", "$RESUME"], - ["rm", "-f", "/vbmeta.img"]], - "flash_lk2nd": [ - ["heimdall_wait_for_device.sh"], - ["heimdall", "flash", "--$PARTITION_KERNEL", "$BOOT/lk2nd.img", - "$NO_REBOOT", "$RESUME"]] - }, - }, - "adb": { - "depends": ["android-tools"], - "actions": { - "list_devices": [["adb", "-P", "5038", "devices"]], - "sideload": [["echo", "< wait for any device >"], - ["adb", "-P", "5038", "wait-for-usb-sideload"], - ["adb", "-P", "5038", "sideload", - "$RECOVERY_ZIP"]], - } - }, - "uuu": { - "depends": ["nxp-mfgtools-uuu"], - "actions": { - "flash_rootfs": [ - # There's a bug(?) in uuu where it clobbers the path in the cmd - # script if the script is not in pwd... 
- ["cp", "$UUU_SCRIPT", "./flash_script.lst"], - ["uuu", "flash_script.lst"], - ], - }, - }, - "rkdeveloptool": { - "split": True, - "depends": ["rkdeveloptool"], - "actions": { - "list_devices": [["rkdeveloptool", "list"]], - "flash_rootfs": [ - ["rkdeveloptool", "write-partition", "$PARTITION_ROOTFS", - "$IMAGE_SPLIT_ROOT"] - ], - "flash_kernel": [ - ["rkdeveloptool", "write-partition", "$PARTITION_KERNEL", - "$IMAGE_SPLIT_BOOT"] - ], - }, - }, - "mtkclient": { - "depends": ["mtkclient"], - "actions": { - "flash_rootfs": [["mtk", "w", "$PARTITION_ROOTFS", - "$IMAGE"]], - "flash_kernel": [["mtk", "w", "$PARTITION_KERNEL", - "$BOOT/boot.img$FLAVOR"]], - "flash_vbmeta": [ - # Generate vbmeta image with "disable verification" flag - ["avbtool", "make_vbmeta_image", "--flags", "2", - "--padding_size", "$FLASH_PAGESIZE", - "--output", "/vbmeta.img"], - ["mtk", "w", "$PARTITION_VBMETA", "/vbmeta.img"], - ["rm", "-f", "/vbmeta.img"] - ], - "flash_dtbo": [["mtk", "w", "$PARTITION_DTBO", - "$BOOT/dtbo.img"]], - "flash_lk2nd": [["mtk", "w", "$PARTITION_KERNEL", - "$BOOT/lk2nd.img"]] - } - } -} - -# -# GIT -# -git_repos = { - "aports_upstream": "https://gitlab.alpinelinux.org/alpine/aports.git", - "pmaports": "https://gitlab.com/postmarketOS/pmaports.git", -} - -# When a git repository is considered outdated (in seconds) -# (Measuring timestamp of FETCH_HEAD: https://stackoverflow.com/a/9229377) -git_repo_outdated = 3600 * 24 * 2 - -# -# APORTGEN -# -aportgen = { - "cross": { - "prefixes": ["busybox-static", "gcc", "musl", "grub-efi"], - "confirm_overwrite": False, - }, - "device/testing": { - "prefixes": ["device", "linux"], - "confirm_overwrite": True, - } -} - -# Use a deterministic mirror URL instead of CDN for aportgen. Otherwise we may -# generate a pmaport that wraps an apk from Alpine (e.g. musl-armv7) locally -# with one up-to-date mirror given by the CDN. But then the build will fail if -# CDN picks an outdated mirror for CI or BPO. -aportgen_mirror_alpine = "http://dl-4.alpinelinux.org/alpine/" - -# -# NEWAPKBUILD -# Options passed through to the "newapkbuild" command from Alpine Linux. They -# are duplicated here, so we can use Python's argparse for argument parsing and -# help page display. The -f (force) flag is not defined here, as we use that in -# the Python code only and don't pass it through. 
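As a rough illustration of the duplication described above, option lists shaped like the ones defined right below can be registered with Python's argparse and later converted back into a newapkbuild command line. This is only a sketch with made-up list contents and variable names, not the actual pmbootstrap argument-parsing code.

    import argparse

    # Shape: [flag, destination name, help text] (example data only)
    arguments_strings = [["-d", "pkgdesc", "set package description"]]
    arguments_switches = [["-C", "cmake", "create CMake package"]]

    parser = argparse.ArgumentParser(prog="newapkbuild-demo")
    for flag, dest, help_text in arguments_strings:
        parser.add_argument(flag, dest=dest, help=help_text)
    for flag, dest, help_text in arguments_switches:
        parser.add_argument(flag, dest=dest, help=help_text,
                            action="store_true")

    args = parser.parse_args(["-d", "hello world package", "-C"])

    # Convert the parsed values back into arguments for newapkbuild
    cmd = []
    for flag, dest, _ in arguments_strings:
        if getattr(args, dest) is not None:
            cmd += [flag, getattr(args, dest)]
    for flag, dest, _ in arguments_switches:
        if getattr(args, dest):
            cmd += [flag]
    print(cmd)  # ['-d', 'hello world package', '-C']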
-# -newapkbuild_arguments_strings = [ - ["-n", "pkgname", "set package name (only use with SRCURL)"], - ["-d", "pkgdesc", "set package description"], - ["-l", "license", "set package license identifier from" - " "], - ["-u", "url", "set package URL"], -] -newapkbuild_arguments_switches_pkgtypes = [ - ["-a", "autotools", "create autotools package (use ./configure ...)"], - ["-C", "cmake", "create CMake package (assume cmake/ is there)"], - ["-m", "meson", "create meson package (assume meson.build is there)"], - ["-p", "perl", "create perl package (assume Makefile.PL is there)"], - ["-y", "python", "create python package (assume setup.py is there)"], - ["-e", "python_gpep517", "create python package (assume pyproject.toml is there)"], - ["-r", "rust", "create rust package (assume Cargo.toml is there)"], -] -newapkbuild_arguments_switches_other = [ - ["-s", "sourceforge", "use sourceforge source URL"], - ["-c", "copy_samples", "copy a sample init.d, conf.d and install script"], -] - -# -# UPGRADE -# -# Patterns of package names to ignore for automatic pmaport upgrading -# ("pmbootstrap aportupgrade --all") -upgrade_ignore = ["device-*", "firmware-*", "linux-*", "postmarketos-*", - "*-aarch64", "*-armhf", "*-armv7", "*-riscv64"] - -# -# SIDELOAD -# -sideload_sudo_prompt = "[sudo] password for %u@%h: " - -# -# CI -# -# Valid options für 'pmbootstrap ci', see https://postmarketos.org/pmb-ci -ci_valid_options = ["native", "slow"] diff --git a/pmb/config/init.py b/pmb/config/init.py deleted file mode 100644 index f3ee980b..00000000 --- a/pmb/config/init.py +++ /dev/null @@ -1,768 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import glob -import json -import os -import shutil - -import pmb.aportgen -import pmb.config -import pmb.config.pmaports -import pmb.helpers.cli -import pmb.helpers.devices -import pmb.helpers.git -import pmb.helpers.http -import pmb.helpers.logging -import pmb.helpers.other -import pmb.helpers.pmaports -import pmb.helpers.run -import pmb.helpers.ui -import pmb.chroot.zap -import pmb.parse.deviceinfo -import pmb.parse._apkbuild - - -def require_programs(): - missing = [] - for program in pmb.config.required_programs: - if not shutil.which(program): - missing.append(program) - if missing: - raise RuntimeError("Can't find all programs required to run" - " pmbootstrap. Please install first:" - f" {', '.join(missing)}") - - -def ask_for_username(args): - """ - Ask for a reasonable username for the non-root user. - - :returns: the username - """ - while True: - ret = pmb.helpers.cli.ask("Username", None, args.user, False, - "[a-z_][a-z0-9_-]*") - if ret == "root": - logging.fatal("ERROR: don't put \"root\" here. This is about" - " creating an additional non-root user. Don't worry," - " the root user will also be created ;)") - continue - return ret - - -def ask_for_work_path(args): - """ - Ask for the work path, until we can create it (when it does not exist) and - write into it. - :returns: (path, exists) - * path: is the full path, with expanded ~ sign - * exists: is False when the folder did not exist before we tested - whether we can create it - """ - logging.info("Location of the 'work' path. 
Multiple chroots" - " (native, device arch, device rootfs) will be created" - " in there.") - while True: - try: - work = os.path.expanduser(pmb.helpers.cli.ask( - "Work path", None, args.work, False)) - work = os.path.realpath(work) - exists = os.path.exists(work) - - # Work must not be inside the pmbootstrap path - if (work == pmb.config.pmb_src or - work.startswith(f"{pmb.config.pmb_src}/")): - logging.fatal("ERROR: The work path must not be inside the" - " pmbootstrap path. Please specify another" - " location.") - continue - - # Create the folder with a version file - if not exists: - os.makedirs(work, 0o700, True) - - # If the version file doesn't exist yet because we either just - # created the work directory or the user has deleted it for - # whatever reason, then we need to initialize it. - work_version_file = f"{work}/version" - if not os.path.isfile(work_version_file): - with open(work_version_file, "w") as handle: - handle.write(f"{pmb.config.work_version}\n") - - # Create cache_git dir, so it is owned by the host system's user - # (otherwise pmb.helpers.mount.bind would create it as root) - os.makedirs(f"{work}/cache_git", 0o700, True) - return (work, exists) - except OSError: - logging.fatal("ERROR: Could not create this folder, or write" - " inside it! Please try again.") - - -def ask_for_channel(args): - """ Ask for the postmarketOS release channel. The channel dictates which - pmaports branch pmbootstrap will check out, and which repository URLs - will be used when initializing chroots. - :returns: channel name (e.g. "edge", "v21.03") """ - channels_cfg = pmb.helpers.git.parse_channels_cfg(args) - count = len(channels_cfg["channels"]) - - # List channels - logging.info("Choose the postmarketOS release channel.") - logging.info(f"Available ({count}):") - # Only show the first 3 releases. This includes edge, the latest supported - # release plus one. Should be a good solution until new needs arrive when - # we might want to have a custom channels.cfg attribute. - for channel, channel_data in list(channels_cfg["channels"].items())[:3]: - logging.info(f"* {channel}: {channel_data['description']}") - - # Default for first run: "recommended" from channels.cfg - # Otherwise, if valid: channel from pmaports.cfg of current branch - # The actual channel name is not saved in pmbootstrap.cfg, because then we - # would need to sync it with what is checked out in pmaports.git. 
- default = pmb.config.pmaports.read_config(args)["channel"] - choices = channels_cfg["channels"].keys() - if args.is_default_channel or default not in choices: - default = channels_cfg["meta"]["recommended"] - - # Ask until user gives valid channel - while True: - ret = pmb.helpers.cli.ask("Channel", None, default, - complete=choices) - if ret in choices: - return ret - logging.fatal("ERROR: Invalid channel specified, please type in one" - " from the list above.") - - -def ask_for_ui(args, info): - ui_list = pmb.helpers.ui.list(args, info["arch"]) - hidden_ui_count = 0 - device_is_accelerated = info.get("gpu_accelerated") == "true" - if not device_is_accelerated: - for i in reversed(range(len(ui_list))): - pkgname = f"postmarketos-ui-{ui_list[i][0]}" - apkbuild = pmb.helpers.pmaports.get(args, pkgname, - subpackages=False, - must_exist=False) - if apkbuild and "pmb:gpu-accel" in apkbuild["options"]: - ui_list.pop(i) - hidden_ui_count += 1 - - # Get default - default = args.ui - if default not in dict(ui_list).keys(): - default = pmb.config.defaults["ui"] - - logging.info(f"Available user interfaces ({len(ui_list) - 1}): ") - ui_completion_list = [] - for ui in ui_list: - logging.info(f"* {ui[0]}: {ui[1]}") - ui_completion_list.append(ui[0]) - if hidden_ui_count > 0: - logging.info(f"NOTE: {hidden_ui_count} UIs are hidden because" - " \"deviceinfo_gpu_accelerated\" is not set (see" - " https://postmarketos.org/deviceinfo).") - while True: - ret = pmb.helpers.cli.ask("User interface", None, default, True, - complete=ui_completion_list) - if ret in dict(ui_list).keys(): - return ret - logging.fatal("ERROR: Invalid user interface specified, please type in" - " one from the list above.") - - -def ask_for_ui_extras(args, ui): - apkbuild = pmb.helpers.pmaports.get(args, f"postmarketos-ui-{ui}", - subpackages=False, must_exist=False) - if not apkbuild: - return False - - extra = apkbuild["subpackages"].get(f"postmarketos-ui-{ui}-extras") - if extra is None: - return False - - logging.info("This user interface has an extra package:" - f" {extra['pkgdesc']}") - - return pmb.helpers.cli.confirm(args, "Enable this package?", - default=args.ui_extras) - - -def ask_for_keymaps(args, info): - if "keymaps" not in info or info["keymaps"].strip() == "": - return "" - options = info["keymaps"].split(' ') - logging.info(f"Available keymaps for device ({len(options)}): " - f"{', '.join(options)}") - if args.keymap == "": - args.keymap = options[0] - - while True: - ret = pmb.helpers.cli.ask("Keymap", None, args.keymap, - True, complete=options) - if ret in options: - return ret - logging.fatal("ERROR: Invalid keymap specified, please type in" - " one from the list above.") - - -def ask_for_timezone(args): - localtimes = ["/etc/zoneinfo/localtime", "/etc/localtime"] - zoneinfo_path = "/usr/share/zoneinfo/" - for localtime in localtimes: - if not os.path.exists(localtime): - continue - tz = "" - if os.path.exists(localtime): - tzpath = os.path.realpath(localtime) - tzpath = tzpath.rstrip() - if os.path.exists(tzpath): - try: - _, tz = tzpath.split(zoneinfo_path) - except: - pass - if tz: - logging.info(f"Your host timezone: {tz}") - if pmb.helpers.cli.confirm(args, - "Use this timezone instead of GMT?", - default="y"): - return tz - logging.info("WARNING: Unable to determine timezone configuration on host," - " using GMT.") - return "GMT" - - -def ask_for_provider_select(args, apkbuild, providers_cfg): - """ - Ask for selectable providers that are specified using "_pmb_select" - in a APKBUILD. 
- - :param apkbuild: the APKBUILD with the _pmb_select - :param providers_cfg: the configuration section with previously selected - providers. Updated with new providers after selection - """ - for select in apkbuild["_pmb_select"]: - providers = pmb.helpers.pmaports.find_providers(args, select) - logging.info(f"Available providers for {select} ({len(providers)}):") - - has_default = False - providers_short = {} - last_selected = providers_cfg.get(select, 'default') - - for pkgname, pkg in providers: - # Strip provider prefix if possible - short = pkgname - if short.startswith(f'{select}-'): - short = short[len(f"{select}-"):] - - # Allow selecting the package using both short and long name - providers_short[pkgname] = pkgname - providers_short[short] = pkgname - - if pkgname == last_selected: - last_selected = short - - if not has_default and pkg.get('provider_priority', 0) != 0: - # Display as default provider - styles = pmb.config.styles - logging.info(f"* {short}: {pkg['pkgdesc']} " - f"{styles['BOLD']}(default){styles['END']}") - has_default = True - else: - logging.info(f"* {short}: {pkg['pkgdesc']}") - - while True: - ret = pmb.helpers.cli.ask("Provider", None, last_selected, True, - complete=providers_short.keys()) - - if has_default and ret == 'default': - # Selecting default means to not select any provider explicitly - # In other words, apk chooses it automatically based on - # "provider_priority" - if select in providers_cfg: - del providers_cfg[select] - break - if ret in providers_short: - providers_cfg[select] = providers_short[ret] - break - logging.fatal("ERROR: Invalid provider specified, please type in" - " one from the list above.") - - -def ask_for_provider_select_pkg(args, pkgname, providers_cfg): - """ - Look up the APKBUILD for the specified pkgname and ask for selectable - providers that are specified using "_pmb_select". - - :param pkgname: name of the package to search APKBUILD for - :param providers_cfg: the configuration section with previously selected - providers. Updated with new providers after selection - """ - apkbuild = pmb.helpers.pmaports.get(args, pkgname, - subpackages=False, must_exist=False) - if not apkbuild: - return - - ask_for_provider_select(args, apkbuild, providers_cfg) - - -def ask_for_device_kernel(args, device): - """ - Ask for the kernel that should be used with the device. - - :param device: code name, e.g. "lg-mako" - :returns: None if the kernel is hardcoded in depends without subpackages - :returns: kernel type ("downstream", "stable", "mainline", ...) - """ - # Get kernels - kernels = pmb.parse._apkbuild.kernels(args, device) - if not kernels: - return args.kernel - - # Get default - default = args.kernel - if default not in kernels: - default = list(kernels.keys())[0] - - # Ask for kernel (extra message when downstream and upstream are available) - logging.info("Which kernel do you want to use with your device?") - if "downstream" in kernels: - logging.info("Downstream kernels are typically the outdated Android" - " kernel forks.") - if "downstream" in kernels and len(kernels) > 1: - logging.info("Upstream kernels (mainline, stable, ...) 
get security" - " updates, but may have less working features than" - " downstream kernels.") - - # List kernels - logging.info(f"Available kernels ({len(kernels)}):") - for type in sorted(kernels.keys()): - logging.info(f"* {type}: {kernels[type]}") - while True: - ret = pmb.helpers.cli.ask("Kernel", None, default, True, - complete=kernels) - if ret in kernels.keys(): - return ret - logging.fatal("ERROR: Invalid kernel specified, please type in one" - " from the list above.") - return ret - - -def ask_for_device_nonfree(args, device): - """ - Ask the user about enabling proprietary firmware (e.g. Wifi) and userland - (e.g. GPU drivers). All proprietary components are in subpackages - $pkgname-nonfree-firmware and $pkgname-nonfree-userland, and we show the - description of these subpackages (so they can indicate which peripherals - are affected). - - :returns: answers as dict, e.g. {"firmware": True, "userland": False} - """ - # Parse existing APKBUILD or return defaults (when called from test case) - apkbuild_path = pmb.helpers.devices.find_path(args, device, 'APKBUILD') - ret = {"firmware": args.nonfree_firmware, - "userland": args.nonfree_userland} - if not apkbuild_path: - return ret - apkbuild = pmb.parse.apkbuild(apkbuild_path) - - # Only run when there is a "nonfree" subpackage - nonfree_found = False - for subpackage in apkbuild["subpackages"].keys(): - if subpackage.startswith(f"device-{device}-nonfree"): - nonfree_found = True - if not nonfree_found: - return ret - - # Short explanation - logging.info("This device has proprietary components, which trade some of" - " your freedom with making more peripherals work.") - logging.info("We would like to offer full functionality without hurting" - " your freedom, but this is currently not possible for your" - " device.") - - # Ask for firmware and userland individually - for type in ["firmware", "userland"]: - subpkgname = f"device-{device}-nonfree-{type}" - subpkg = apkbuild["subpackages"].get(subpkgname, {}) - if subpkg is None: - raise RuntimeError("Cannot find subpackage function for " - f"{subpkgname}") - if subpkg: - logging.info(f"{subpkgname}: {subpkg['pkgdesc']}") - ret[type] = pmb.helpers.cli.confirm(args, "Enable this package?", - default=ret[type]) - return ret - - -def ask_for_device(args): - vendors = sorted(pmb.helpers.devices.list_vendors(args)) - logging.info("Choose your target device vendor (either an " - "existing one, or a new one for porting).") - logging.info(f"Available vendors ({len(vendors)}): {', '.join(vendors)}") - - current_vendor = None - current_codename = None - if args.device: - current_vendor = args.device.split("-", 1)[0] - current_codename = args.device.split("-", 1)[1] - - while True: - vendor = pmb.helpers.cli.ask("Vendor", None, current_vendor, - False, r"[a-z0-9]+", vendors) - - new_vendor = vendor not in vendors - codenames = [] - if new_vendor: - logging.info("The specified vendor ({}) could not be found in" - " existing ports, do you want to start a new" - " port?".format(vendor)) - if not pmb.helpers.cli.confirm(args, default=True): - continue - else: - # Unmaintained devices can be selected, but are not displayed - devices = sorted(pmb.helpers.devices.list_codenames( - args, vendor, unmaintained=False)) - # Remove "vendor-" prefixes from device list - codenames = [x.split('-', 1)[1] for x in devices] - logging.info(f"Available codenames ({len(codenames)}): " + - ", ".join(codenames)) - - if current_vendor != vendor: - current_codename = '' - codename = pmb.helpers.cli.ask("Device codename", 
None, - current_codename, False, r"[a-z0-9]+", - codenames) - - device = f"{vendor}-{codename}" - device_path = pmb.helpers.devices.find_path(args, device, 'deviceinfo') - device_exists = device_path is not None - if not device_exists: - if device == args.device: - raise RuntimeError( - "This device does not exist anymore, check" - " " - " to see if it was renamed") - logging.info("You are about to do" - f" a new device port for '{device}'.") - if not pmb.helpers.cli.confirm(args, default=True): - current_vendor = vendor - continue - - # New port creation confirmed - logging.info("Generating new aports for: {}...".format(device)) - pmb.aportgen.generate(args, f"device-{device}") - pmb.aportgen.generate(args, f"linux-{device}") - elif "/unmaintained/" in device_path: - apkbuild = f"{device_path[:-len('deviceinfo')]}APKBUILD" - unmaintained = pmb.parse._apkbuild.unmaintained(apkbuild) - logging.info(f"WARNING: {device} is unmaintained: {unmaintained}") - if not pmb.helpers.cli.confirm(args): - continue - break - - kernel = ask_for_device_kernel(args, device) - nonfree = ask_for_device_nonfree(args, device) - return (device, device_exists, kernel, nonfree) - - -def ask_for_additional_options(args, cfg): - # Allow to skip additional options - logging.info("Additional options:" - f" extra free space: {args.extra_space} MB," - f" boot partition size: {args.boot_size} MB," - f" parallel jobs: {args.jobs}," - f" ccache per arch: {args.ccache_size}," - f" sudo timer: {args.sudo_timer}," - f" mirror: {','.join(args.mirrors_postmarketos)}") - - if not pmb.helpers.cli.confirm(args, "Change them?", - default=False): - return - - # Extra space - logging.info("Set extra free space to 0, unless you ran into a 'No space" - " left on device' error. In that case, the size of the" - " rootfs could not be calculated properly on your machine," - " and we need to add extra free space to make the image big" - " enough to fit the rootfs (pmbootstrap#1904)." - " How much extra free space do you want to add to the image" - " (in MB)?") - answer = pmb.helpers.cli.ask("Extra space size", None, - args.extra_space, validation_regex="^[0-9]+$") - cfg["pmbootstrap"]["extra_space"] = answer - - # Boot size - logging.info("What should be the boot partition size (in MB)?") - answer = pmb.helpers.cli.ask("Boot size", None, args.boot_size, - validation_regex="^[1-9][0-9]*$") - cfg["pmbootstrap"]["boot_size"] = answer - - # Parallel job count - logging.info("How many jobs should run parallel on this machine, when" - " compiling?") - answer = pmb.helpers.cli.ask("Jobs", None, args.jobs, - validation_regex="^[1-9][0-9]*$") - cfg["pmbootstrap"]["jobs"] = answer - - # Ccache size - logging.info("We use ccache to speed up building the same code multiple" - " times. How much space should the ccache folder take up per" - " architecture? After init is through, you can check the" - " current usage with 'pmbootstrap stats'. Answer with 0 for" - " infinite.") - regex = "0|[0-9]+(k|M|G|T|Ki|Mi|Gi|Ti)" - answer = pmb.helpers.cli.ask("Ccache size", None, args.ccache_size, - lowercase_answer=False, - validation_regex=regex) - cfg["pmbootstrap"]["ccache_size"] = answer - - # Sudo timer - logging.info("pmbootstrap does everything in Alpine Linux chroots, so" - " your host system does not get modified. In order to" - " work with these chroots, pmbootstrap calls 'sudo'" - " internally. 
For long running operations, it is possible" - " that you'll have to authorize sudo more than once.") - answer = pmb.helpers.cli.confirm(args, "Enable background timer to prevent" - " repeated sudo authorization?", - default=args.sudo_timer) - cfg["pmbootstrap"]["sudo_timer"] = str(answer) - - # Mirrors - # prompt for mirror change - logging.info("Selected mirror:" - f" {','.join(args.mirrors_postmarketos)}") - if pmb.helpers.cli.confirm(args, "Change mirror?", default=False): - mirrors = ask_for_mirror(args) - cfg["pmbootstrap"]["mirrors_postmarketos"] = ",".join(mirrors) - - -def ask_for_mirror(args): - regex = "^[1-9][0-9]*$" # single non-zero number only - - json_path = pmb.helpers.http.download( - args, "https://postmarketos.org/mirrors.json", "pmos_mirrors", - cache=False) - with open(json_path, "rt") as handle: - s = handle.read() - - logging.info("List of available mirrors:") - mirrors = json.loads(s) - keys = mirrors.keys() - i = 1 - for key in keys: - logging.info(f"[{i}]\t{key} ({mirrors[key]['location']})") - i += 1 - - urls = [] - for key in keys: - # accept only http:// or https:// urls - http_count = 0 # remember if we saw any http:// only URLs - link_list = [] - for k in mirrors[key]["urls"]: - if k.startswith("http"): - link_list.append(k) - if k.startswith("http://"): - http_count += 1 - # remove all https urls if there is more that one URL and one of - # them was http:// - if http_count > 0 and len(link_list) > 1: - link_list = [k for k in link_list if not k.startswith("https")] - if len(link_list) > 0: - urls.append(link_list[0]) - - mirror_indexes = [] - for mirror in args.mirrors_postmarketos: - for i in range(len(urls)): - if urls[i] == mirror: - mirror_indexes.append(str(i + 1)) - break - - mirrors_list = [] - # require one valid mirror index selected by user - while len(mirrors_list) != 1: - answer = pmb.helpers.cli.ask("Select a mirror", None, - ",".join(mirror_indexes), - validation_regex=regex) - mirrors_list = [] - for i in answer.split(","): - idx = int(i) - 1 - if 0 <= idx < len(urls): - mirrors_list.append(urls[idx]) - if len(mirrors_list) != 1: - logging.info("You must select one valid mirror!") - - return mirrors_list - - -def ask_for_hostname(args, device): - while True: - ret = pmb.helpers.cli.ask("Device hostname (short form, e.g. 'foo')", - None, (args.hostname or device), True) - if not pmb.helpers.other.validate_hostname(ret): - continue - # Don't store device name in user's config (gets replaced in install) - if ret == device: - return "" - return ret - - -def ask_for_ssh_keys(args): - if not len(glob.glob(os.path.expanduser("~/.ssh/id_*.pub"))): - return False - return pmb.helpers.cli.confirm(args, - "Would you like to copy your SSH public" - " keys to the device?", - default=args.ssh_keys) - - -def ask_build_pkgs_on_install(args): - logging.info("After pmaports are changed, the binary packages may be" - " outdated. If you want to install postmarketOS without" - " changes, reply 'n' for a faster installation.") - return pmb.helpers.cli.confirm(args, "Build outdated packages during" - " 'pmbootstrap install'?", - default=args.build_pkgs_on_install) - - -def get_locales(): - ret = [] - list_path = f"{pmb.config.pmb_src}/pmb/data/locales" - with open(list_path, "r") as handle: - for line in handle: - ret += [line.rstrip()] - return ret - - -def ask_for_locale(args): - locales = get_locales() - logging.info("Choose your preferred locale, like e.g. en_US. Only UTF-8" - " is supported, it gets appended automatically. 
Use" - " tab-completion if needed.") - - while True: - ret = pmb.helpers.cli.ask("Locale", - choices=None, - default=args.locale.replace(".UTF-8", ""), - lowercase_answer=False, - complete=locales) - ret = ret.replace(".UTF-8", "") - if ret not in locales: - logging.info("WARNING: this locale is not in the list of known" - " valid locales.") - if pmb.helpers.cli.ask() != "y": - # Ask again - continue - - return f"{ret}.UTF-8" - - -def frontend(args): - require_programs() - - # Work folder (needs to be first, so we can create chroots early) - cfg = pmb.config.load(args) - work, work_exists = ask_for_work_path(args) - cfg["pmbootstrap"]["work"] = work - - # Update args and save config (so chroots and 'pmbootstrap log' work) - pmb.helpers.args.update_work(args, work) - pmb.config.save(args, cfg) - - # Migrate work dir if necessary - pmb.helpers.other.migrate_work_folder(args) - - # Clone pmaports - pmb.config.pmaports.init(args) - - # Choose release channel, possibly switch pmaports branch - channel = ask_for_channel(args) - pmb.config.pmaports.switch_to_channel_branch(args, channel) - cfg["pmbootstrap"]["is_default_channel"] = "False" - - # Copy the git hooks if master was checked out. (Don't symlink them and - # only do it on master, so the git hooks don't change unexpectedly when - # having a random branch checked out.) - branch_current = pmb.helpers.git.rev_parse(args, args.aports, - extra_args=["--abbrev-ref"]) - if branch_current == "master": - logging.info("NOTE: pmaports is on master branch, copying git hooks.") - pmb.config.pmaports.install_githooks(args) - - # Device - device, device_exists, kernel, nonfree = ask_for_device(args) - cfg["pmbootstrap"]["device"] = device - cfg["pmbootstrap"]["kernel"] = kernel - cfg["pmbootstrap"]["nonfree_firmware"] = str(nonfree["firmware"]) - cfg["pmbootstrap"]["nonfree_userland"] = str(nonfree["userland"]) - - info = pmb.parse.deviceinfo(args, device) - apkbuild_path = pmb.helpers.devices.find_path(args, device, 'APKBUILD') - if apkbuild_path: - apkbuild = pmb.parse.apkbuild(apkbuild_path) - ask_for_provider_select(args, apkbuild, cfg["providers"]) - - # Device keymap - if device_exists: - cfg["pmbootstrap"]["keymap"] = ask_for_keymaps(args, info) - - cfg["pmbootstrap"]["user"] = ask_for_username(args) - ask_for_provider_select_pkg(args, "postmarketos-base", cfg["providers"]) - ask_for_provider_select_pkg(args, "postmarketos-base-ui", cfg["providers"]) - - # UI and various build options - ui = ask_for_ui(args, info) - cfg["pmbootstrap"]["ui"] = ui - cfg["pmbootstrap"]["ui_extras"] = str(ask_for_ui_extras(args, ui)) - ask_for_provider_select_pkg(args, f"postmarketos-ui-{ui}", - cfg["providers"]) - ask_for_additional_options(args, cfg) - - # Extra packages to be installed to rootfs - logging.info("Additional packages that will be installed to rootfs." - " Specify them in a comma separated list (e.g.: vim,file)" - " or \"none\"") - extra = pmb.helpers.cli.ask("Extra packages", None, - args.extra_packages, - validation_regex=r"^([-.+\w]+)(,[-.+\w]+)*$") - cfg["pmbootstrap"]["extra_packages"] = extra - - # Configure timezone info - cfg["pmbootstrap"]["timezone"] = ask_for_timezone(args) - - # Locale - cfg["pmbootstrap"]["locale"] = ask_for_locale(args) - - # Hostname - cfg["pmbootstrap"]["hostname"] = ask_for_hostname(args, device) - - # SSH keys - cfg["pmbootstrap"]["ssh_keys"] = str(ask_for_ssh_keys(args)) - - # pmaports path (if users change it with: 'pmbootstrap --aports=... 
init') - cfg["pmbootstrap"]["aports"] = args.aports - - # Build outdated packages in pmbootstrap install - cfg["pmbootstrap"]["build_pkgs_on_install"] = str( - ask_build_pkgs_on_install(args)) - - # Save config - pmb.config.save(args, cfg) - - # Zap existing chroots - if (work_exists and device_exists and - len(glob.glob(args.work + "/chroot_*")) and - pmb.helpers.cli.confirm( - args, "Zap existing chroots to apply configuration?", - default=True)): - setattr(args, "deviceinfo", info) - - # Do not zap any existing packages or cache_http directories - pmb.chroot.zap(args, confirm=False) - - logging.info("WARNING: The chroots and git repositories in the work dir do" - " not get updated automatically.") - logging.info("Run 'pmbootstrap status' once a day before working with" - " pmbootstrap to make sure that everything is up-to-date.") - logging.info("DONE!") diff --git a/pmb/config/load.py b/pmb/config/load.py deleted file mode 100644 index f67215f3..00000000 --- a/pmb/config/load.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import configparser -import os -import pmb.config - - -def load(args): - cfg = configparser.ConfigParser() - if os.path.isfile(args.config): - cfg.read(args.config) - - if "pmbootstrap" not in cfg: - cfg["pmbootstrap"] = {} - if "providers" not in cfg: - cfg["providers"] = {} - - for key in pmb.config.defaults: - if key in pmb.config.config_keys and key not in cfg["pmbootstrap"]: - cfg["pmbootstrap"][key] = str(pmb.config.defaults[key]) - - # We used to save default values in the config, which can *not* be - # configured in "pmbootstrap init". That doesn't make sense, we always - # want to use the defaults from pmb/config/__init__.py in that case, - # not some outdated version we saved some time back (eg. aports folder, - # postmarketOS binary packages mirror). - if key not in pmb.config.config_keys and key in cfg["pmbootstrap"]: - logging.debug("Ignored unconfigurable and possibly outdated" - " default value from config:" - f" {cfg['pmbootstrap'][key]}") - del cfg["pmbootstrap"][key] - - return cfg diff --git a/pmb/config/merge_with_args.py b/pmb/config/merge_with_args.py deleted file mode 100644 index 262a28f0..00000000 --- a/pmb/config/merge_with_args.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.config - - -def merge_with_args(args): - """ - We have the internal config (pmb/config/__init__.py) and the user config - (usually ~/.config/pmbootstrap.cfg, can be changed with the '-c' - parameter). - - Args holds the variables parsed from the commandline (e.g. -j fills out - args.jobs), and values specified on the commandline count the most. - - In case it is not specified on the commandline, for the keys in - pmb.config.config_keys, we look into the value set in the the user config. - - When that is empty as well (e.g. just before pmbootstrap init), or the key - is not in pmb.config_keys, we use the default value from the internal - config. 
- """ - # Use defaults from the user's config file - cfg = pmb.config.load(args) - for key in cfg["pmbootstrap"]: - if key not in args or getattr(args, key) is None: - value = cfg["pmbootstrap"][key] - if key in pmb.config.defaults: - default = pmb.config.defaults[key] - if isinstance(default, bool): - value = (value.lower() == "true") - setattr(args, key, value) - setattr(args, 'selected_providers', cfg['providers']) - - # Use defaults from pmb.config.defaults - for key, value in pmb.config.defaults.items(): - if key not in args or getattr(args, key) is None: - setattr(args, key, value) diff --git a/pmb/config/pmaports.py b/pmb/config/pmaports.py deleted file mode 100644 index 29678964..00000000 --- a/pmb/config/pmaports.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import configparser -import logging -import os -import sys - -import pmb.config -import pmb.helpers.git -import pmb.helpers.pmaports - - -def check_legacy_folder(): - # Existing pmbootstrap/aports must be a symlink - link = pmb.config.pmb_src + "/aports" - if os.path.exists(link) and not os.path.islink(link): - raise RuntimeError("The path '" + link + "' should be a" - " symlink pointing to the new pmaports" - " repository, which was split from the" - " pmbootstrap repository (#383). Consider" - " making a backup of that folder, then delete" - " it and run 'pmbootstrap init' again to let" - " pmbootstrap clone the pmaports repository and" - " set up the symlink.") - - -def clone(args): - logging.info("Setting up the native chroot and cloning the package build" - " recipes (pmaports)...") - - # Set up the native chroot and clone pmaports - pmb.helpers.git.clone(args, "pmaports") - - -def symlink(args): - # Create the symlink - # This won't work when pmbootstrap was installed system wide, but that's - # okay since the symlink is only intended to make the migration to the - # pmaports repository easier. - link = pmb.config.pmb_src + "/aports" - try: - os.symlink(args.aports, link) - logging.info("NOTE: pmaports path: " + link) - except: - logging.info("NOTE: pmaports path: " + args.aports) - - -def check_version_pmaports(real): - # Compare versions - min = pmb.config.pmaports_min_version - if pmb.parse.version.compare(real, min) >= 0: - return - - # Outated error - logging.info("NOTE: your pmaports folder has version " + real + ", but" + - " version " + min + " is required.") - raise RuntimeError("Run 'pmbootstrap pull' to update your pmaports.") - - -def check_version_pmbootstrap(min): - # Compare versions - real = pmb.__version__ - if pmb.parse.version.compare(real, min) >= 0: - return - - # Show versions - logging.info("NOTE: you are using pmbootstrap version " + real + ", but" + - " version " + min + " is required.") - - # Error for git clone - pmb_src = pmb.config.pmb_src - if os.path.exists(pmb_src + "/.git"): - raise RuntimeError("Please update your local pmbootstrap repository." - " Usually with: 'git -C \"" + pmb_src + "\" pull'") - - # Error for package manager installation - raise RuntimeError("Please update your pmbootstrap version (with your" - " distribution's package manager, or with pip, " - " depending on how you have installed it). If that is" - " not possible, consider cloning the latest version" - " of pmbootstrap from git.") - - -def read_config(args): - """ Read and verify pmaports.cfg. 
""" - # Try cache first - cache_key = "pmb.config.pmaports.read_config" - if pmb.helpers.other.cache[cache_key]: - return pmb.helpers.other.cache[cache_key] - - # Migration message - if not os.path.exists(args.aports): - logging.error(f"ERROR: pmaports dir not found: {args.aports}") - logging.error("Did you run 'pmbootstrap init'?") - sys.exit(1) - - # Require the config - path_cfg = args.aports + "/pmaports.cfg" - if not os.path.exists(path_cfg): - raise RuntimeError("Invalid pmaports repository, could not find the" - " config: " + path_cfg) - - # Load the config - cfg = configparser.ConfigParser() - cfg.read(path_cfg) - ret = cfg["pmaports"] - - # Version checks - check_version_pmaports(ret["version"]) - check_version_pmbootstrap(ret["pmbootstrap_min_version"]) - - # Translate legacy channel names - ret["channel"] = pmb.helpers.pmaports.get_channel_new(ret["channel"]) - - # Cache and return - pmb.helpers.other.cache[cache_key] = ret - return ret - - -def read_config_channel(args): - """ Get the properties of the currently active channel in pmaports.git, - as specified in channels.cfg (https://postmarketos.org/channels.cfg). - :returns: {"description: ..., - "branch_pmaports": ..., - "branch_aports": ..., - "mirrordir_alpine": ...} """ - channel = read_config(args)["channel"] - channels_cfg = pmb.helpers.git.parse_channels_cfg(args) - - if channel in channels_cfg["channels"]: - return channels_cfg["channels"][channel] - - # Channel not in channels.cfg, try to be helpful - branch = pmb.helpers.git.rev_parse(args, args.aports, - extra_args=["--abbrev-ref"]) - branches_official = pmb.helpers.git.get_branches_official(args, "pmaports") - branches_official = ", ".join(branches_official) - remote = pmb.helpers.git.get_upstream_remote(args, "pmaports") - logging.info("NOTE: fix the error by rebasing or cherry picking relevant" - " commits from this branch onto a branch that is on a" - f" supported channel: {branches_official}") - logging.info("NOTE: as workaround, you may pass --config-channels with a" - " custom channels.cfg. Reference:" - " https://postmarketos.org/channels.cfg") - raise RuntimeError(f"Current branch '{branch}' of pmaports.git is on" - f" channel '{channel}', but this channel was not" - f" found in channels.cfg (of {remote}/master" - " branch). Looks like a very old branch.") - - -def init(args): - check_legacy_folder() - if not os.path.exists(args.aports): - clone(args) - symlink(args) - read_config(args) - - -def switch_to_channel_branch(args, channel_new): - """ Checkout the channel's branch in pmaports.git. - :channel_new: channel name (e.g. 
"edge", "v21.03") - :returns: True if another branch was checked out, False otherwise """ - # Check current pmaports branch channel - channel_current = read_config(args)["channel"] - if channel_current == channel_new: - return False - - # List current and new branches/channels - channels_cfg = pmb.helpers.git.parse_channels_cfg(args) - branch_new = channels_cfg["channels"][channel_new]["branch_pmaports"] - branch_current = pmb.helpers.git.rev_parse(args, args.aports, - extra_args=["--abbrev-ref"]) - logging.info(f"Currently checked out branch '{branch_current}' of" - f" pmaports.git is on channel '{channel_current}'.") - logging.info(f"Switching to branch '{branch_new}' on channel" - f" '{channel_new}'...") - - # Make sure we don't have mounts related to the old channel - pmb.chroot.shutdown(args) - - # Attempt to switch branch (git gives a nice error message, mentioning - # which files need to be committed/stashed, so just pass it through) - if pmb.helpers.run.user(args, ["git", "checkout", branch_new], - args.aports, "interactive", check=False): - raise RuntimeError("Failed to switch branch. Go to your pmaports and" - " fix what git complained about, then try again: " - f"{args.aports}") - - # Invalidate all caches - pmb.helpers.other.init_cache() - - # Verify pmaports.cfg on new branch - read_config(args) - return True - - -def install_githooks(args): - hooks_dir = os.path.join(args.aports, ".githooks") - if not os.path.exists(hooks_dir): - logging.info("No .githooks dir found") - return - for h in os.listdir(hooks_dir): - src = os.path.join(hooks_dir, h) - # Use git default hooks dir so users can ignore our hooks - # if they dislike them by setting "core.hooksPath" git config - dst = os.path.join(args.aports, ".git", "hooks", h) - if pmb.helpers.run.user(args, ["cp", src, dst], check=False): - logging.warning(f"WARNING: Copying git hook failed: {dst}") diff --git a/pmb/config/save.py b/pmb/config/save.py deleted file mode 100644 index e10821a7..00000000 --- a/pmb/config/save.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging - - -def save(args, cfg): - logging.debug("Save config: " + args.config) - os.makedirs(os.path.dirname(args.config), 0o700, True) - with open(args.config, "w") as handle: - cfg.write(handle) diff --git a/pmb/config/sudo.py b/pmb/config/sudo.py deleted file mode 100644 index baa5963e..00000000 --- a/pmb/config/sudo.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2023 Anjandev Momi -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import shutil -from functools import lru_cache -from typing import Optional - - -@lru_cache() -def which_sudo() -> Optional[str]: - """Returns a command required to run commands as root, if any. - - Find whether sudo or doas is installed for commands that require root. - Allows user to override preferred sudo with PMB_SUDO env variable. - """ - - if os.getuid() == 0: - return None - - supported_sudos = ['doas', 'sudo'] - - user_set_sudo = os.getenv("PMB_SUDO") - if user_set_sudo is not None: - if shutil.which(user_set_sudo) is None: - raise RuntimeError("PMB_SUDO environmental variable is set to" - f" {user_set_sudo} but pmbootstrap cannot find" - " this command on your system.") - return user_set_sudo - - for sudo in supported_sudos: - if shutil.which(sudo) is not None: - return sudo - - raise RuntimeError("Can't find sudo or doas required to run pmbootstrap." 
- " Please install sudo, doas, or specify your own sudo" - " with the PMB_SUDO environmental variable.") diff --git a/pmb/config/workdir.py b/pmb/config/workdir.py deleted file mode 100644 index 4a32c3fb..00000000 --- a/pmb/config/workdir.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Save, read, verify workdir state related information in $WORK/workdir.cfg, - for example the init dates of the chroots. This is not saved in - pmbootstrap.cfg, because pmbootstrap.cfg is not tied to a specific work - dir. """ -import configparser -import os -import time - -import pmb.config -import pmb.config.pmaports - - -def chroot_save_init(args, suffix): - """ Save the chroot initialization data in $WORK/workdir.cfg. """ - # Read existing cfg - cfg = configparser.ConfigParser() - path = args.work + "/workdir.cfg" - if os.path.isfile(path): - cfg.read(path) - - # Create sections - for key in ["chroot-init-dates", "chroot-channels"]: - if key not in cfg: - cfg[key] = {} - - # Update sections - channel = pmb.config.pmaports.read_config(args)["channel"] - cfg["chroot-channels"][suffix] = channel - cfg["chroot-init-dates"][suffix] = str(int(time.time())) - - # Write back - with open(path, "w") as handle: - cfg.write(handle) - - -def chroots_outdated(args): - """ Check if init dates from workdir.cfg indicate that any chroot is - outdated. - :returns: True if any of the chroots are outdated and should be zapped, - False otherwise """ - # Skip if workdir.cfg doesn't exist - path = args.work + "/workdir.cfg" - if not os.path.exists(path): - return False - - cfg = configparser.ConfigParser() - cfg.read(path) - key = "chroot-init-dates" - if key not in cfg: - return False - - date_outdated = time.time() - pmb.config.chroot_outdated - for suffix in cfg[key]: - date_init = int(cfg[key][suffix]) - if date_init <= date_outdated: - return True - return False - - -def chroot_check_channel(args, suffix): - path = args.work + "/workdir.cfg" - msg_again = "Run 'pmbootstrap zap' to delete your chroots and try again." - msg_unknown = ("Could not figure out on which release channel the" - f" '{suffix}' chroot is.") - if not os.path.exists(path): - raise RuntimeError(f"{msg_unknown} {msg_again}") - - cfg = configparser.ConfigParser() - cfg.read(path) - key = "chroot-channels" - if key not in cfg or suffix not in cfg[key]: - raise RuntimeError(f"{msg_unknown} {msg_again}") - - channel = pmb.config.pmaports.read_config(args)["channel"] - channel_cfg = cfg[key][suffix] - if channel != channel_cfg: - raise RuntimeError(f"Chroot '{suffix}' was created for the" - f" '{channel_cfg}' channel, but you are on the" - f" '{channel}' channel now. {msg_again}") - - -def clean(args): - """ Remove obsolete data data from workdir.cfg. 
- :returns: None if workdir does not exist, - True if config was rewritten, - False if config did not change """ - # Skip if workdir.cfg doesn't exist - path = args.work + "/workdir.cfg" - if not os.path.exists(path): - return None - - # Read - cfg = configparser.ConfigParser() - cfg.read(path) - - # Remove entries for deleted chroots - changed = False - for key in ["chroot-init-dates", "chroot-channels"]: - if key not in cfg: - continue - for suffix in cfg[key]: - path_suffix = args.work + "/chroot_" + suffix - if os.path.exists(path_suffix): - continue - changed = True - del cfg[key][suffix] - - # Write back - if changed: - with open(path, "w") as handle: - cfg.write(handle) - - return changed diff --git a/pmb/data/keys/README b/pmb/data/keys/README deleted file mode 100644 index 2b716108..00000000 --- a/pmb/data/keys/README +++ /dev/null @@ -1,9 +0,0 @@ -All Alpine Linux keys are stored here, so we can verify the downloaded files with pmbootstrap before APK itself is verified. - -Sources for the keys (must be identical, there's a testcase that verifies this): - https://github.com/alpinelinux/aports/tree/master/main/alpine-keys - http://git.alpinelinux.org/cgit/aports/tree/main/alpine-keys?h=master - alpine-keys package - -In addition, this key holds keys for the official postmarketOS repository: - https://build.postmarketos.org diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-4a6a0840.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-4a6a0840.rsa.pub deleted file mode 100644 index bb4bdc80..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-4a6a0840.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1yHJxQgsHQREclQu4Ohe -qxTxd1tHcNnvnQTu/UrTky8wWvgXT+jpveroeWWnzmsYlDI93eLI2ORakxb3gA2O -Q0Ry4ws8vhaxLQGC74uQR5+/yYrLuTKydFzuPaS1dK19qJPXB8GMdmFOijnXX4SA -jixuHLe1WW7kZVtjL7nufvpXkWBGjsfrvskdNA/5MfxAeBbqPgaq0QMEfxMAn6/R -L5kNepi/Vr4S39Xvf2DzWkTLEK8pcnjNkt9/aafhWqFVW7m3HCAII6h/qlQNQKSo -GuH34Q8GsFG30izUENV9avY7hSLq7nggsvknlNBZtFUcmGoQrtx3FmyYsIC8/R+B -ywIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5243ef4b.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5243ef4b.rsa.pub deleted file mode 100644 index 6cbfad74..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5243ef4b.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvNijDxJ8kloskKQpJdx+ -mTMVFFUGDoDCbulnhZMJoKNkSuZOzBoFC94omYPtxnIcBdWBGnrm6ncbKRlR+6oy -DO0W7c44uHKCFGFqBhDasdI4RCYP+fcIX/lyMh6MLbOxqS22TwSLhCVjTyJeeH7K -aA7vqk+QSsF4TGbYzQDDpg7+6aAcNzg6InNePaywA6hbT0JXbxnDWsB+2/LLSF2G -mnhJlJrWB1WGjkz23ONIWk85W4S0XB/ewDefd4Ly/zyIciastA7Zqnh7p3Ody6Q0 -sS2MJzo7p3os1smGjUF158s6m/JbVh4DN6YIsxwl2OjDOz9R0OycfJSDaBVIGZzg -cQIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-524d27bb.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-524d27bb.rsa.pub deleted file mode 100644 index 1d34c93e..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-524d27bb.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr8s1q88XpuJWLCZALdKj -lN8wg2ePB2T9aIcaxryYE/Jkmtu+ZQ5zKq6BT3y/udt5jAsMrhHTwroOjIsF9DeG -e8Y3vjz+Hh4L8a7hZDaw8jy3CPag47L7nsZFwQOIo2Cl1SnzUc6/owoyjRU7ab0p -iWG5HK8IfiybRbZxnEbNAfT4R53hyI6z5FhyXGS2Ld8zCoU/R4E1P0CUuXKEN4p0 -64dyeUoOLXEWHjgKiU1mElIQj3k/IF02W89gDj285YgwqA49deLUM7QOd53QLnx+ 
-xrIrPv3A+eyXMFgexNwCKQU9ZdmWa00MjjHlegSGK8Y2NPnRoXhzqSP9T9i2HiXL -VQIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5261cecb.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5261cecb.rsa.pub deleted file mode 100644 index 83f0658e..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5261cecb.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwlzMkl7b5PBdfMzGdCT0 -cGloRr5xGgVmsdq5EtJvFkFAiN8Ac9MCFy/vAFmS8/7ZaGOXoCDWbYVLTLOO2qtX -yHRl+7fJVh2N6qrDDFPmdgCi8NaE+3rITWXGrrQ1spJ0B6HIzTDNEjRKnD4xyg4j -g01FMcJTU6E+V2JBY45CKN9dWr1JDM/nei/Pf0byBJlMp/mSSfjodykmz4Oe13xB -Ca1WTwgFykKYthoLGYrmo+LKIGpMoeEbY1kuUe04UiDe47l6Oggwnl+8XD1MeRWY -sWgj8sF4dTcSfCMavK4zHRFFQbGp/YFJ/Ww6U9lA3Vq0wyEI6MCMQnoSMFwrbgZw -wwIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58199dcc.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58199dcc.rsa.pub deleted file mode 100644 index 2b99a0d1..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58199dcc.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3v8/ye/V/t5xf4JiXLXa -hWFRozsnmn3hobON20GdmkrzKzO/eUqPOKTpg2GtvBhK30fu5oY5uN2ORiv2Y2ht -eLiZ9HVz3XP8Fm9frha60B7KNu66FO5P2o3i+E+DWTPqqPcCG6t4Znk2BypILcit -wiPKTsgbBQR2qo/cO01eLLdt6oOzAaF94NH0656kvRewdo6HG4urbO46tCAizvCR -CA7KGFMyad8WdKkTjxh8YLDLoOCtoZmXmQAiwfRe9pKXRH/XXGop8SYptLqyVVQ+ -tegOD9wRs2tOlgcLx4F/uMzHN7uoho6okBPiifRX+Pf38Vx+ozXh056tjmdZkCaV -aQIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58cbb476.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58cbb476.rsa.pub deleted file mode 100644 index a9ead55e..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58cbb476.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoSPnuAGKtRIS5fEgYPXD -8pSGvKAmIv3A08LBViDUe+YwhilSHbYXUEAcSH1KZvOo1WT1x2FNEPBEFEFU1Eyc -+qGzbA03UFgBNvArurHQ5Z/GngGqE7IarSQFSoqewYRtFSfp+TL9CUNBvM0rT7vz -2eMu3/wWG+CBmb92lkmyWwC1WSWFKO3x8w+Br2IFWvAZqHRt8oiG5QtYvcZL6jym -Y8T6sgdDlj+Y+wWaLHs9Fc+7vBuyK9C4O1ORdMPW15qVSl4Lc2Wu1QVwRiKnmA+c -DsH/m7kDNRHM7TjWnuj+nrBOKAHzYquiu5iB3Qmx+0gwnrSVf27Arc3ozUmmJbLj -zQIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58e4f17d.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58e4f17d.rsa.pub deleted file mode 100644 index 8f990949..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-58e4f17d.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvBxJN9ErBgdRcPr5g4hV -qyUSGZEKuvQliq2Z9SRHLh2J43+EdB6A+yzVvLnzcHVpBJ+BZ9RV30EM9guck9sh -r+bryZcRHyjG2wiIEoduxF2a8KeWeQH7QlpwGhuobo1+gA8L0AGImiA6UP3LOirl -I0G2+iaKZowME8/tydww4jx5vG132JCOScMjTalRsYZYJcjFbebQQolpqRaGB4iG -WqhytWQGWuKiB1A22wjmIYf3t96l1Mp+FmM2URPxD1gk/BIBnX7ew+2gWppXOK9j -1BJpo0/HaX5XoZ/uMqISAAtgHZAqq+g3IUPouxTphgYQRTRYpz2COw3NF43VYQrR -bQIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5e69ca50.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5e69ca50.rsa.pub deleted file mode 100644 index 097740eb..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-5e69ca50.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwXEJ8uVwJPODshTkf2BH 
-pH5fVVDppOa974+IQJsZDmGd3Ny0dcd+WwYUhNFUW3bAfc3/egaMWCaprfaHn+oS -4ddbOFgbX8JCHdru/QMAAU0aEWSMybfJGA569c38fNUF/puX6XK/y0lD2SS3YQ/a -oJ5jb5eNrQGR1HHMAd0G9WC4JeZ6WkVTkrcOw55F00aUPGEjejreXBerhTyFdabo -dSfc1TILWIYD742Lkm82UBOPsOSdSfOdsMOOkSXxhdCJuCQQ70DHkw7Epy9r+X33 -ybI4r1cARcV75OviyhD8CFhAlapLKaYnRFqFxlA515e6h8i8ih/v3MSEW17cCK0b -QwIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-60ac2099.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-60ac2099.rsa.pub deleted file mode 100644 index 2b8a4a93..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-60ac2099.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwR4uJVtJOnOFGchnMW5Y -j5/waBdG1u5BTMlH+iQMcV5+VgWhmpZHJCBz3ocD+0IGk2I68S5TDOHec/GSC0lv -6R9o6F7h429GmgPgVKQsc8mPTPtbjJMuLLs4xKc+viCplXc0Nc0ZoHmCH4da6fCV -tdpHQjVe6F9zjdquZ4RjV6R6JTiN9v924dGMAkbW/xXmamtz51FzondKC52Gh8Mo -/oA0/T0KsCMCi7tb4QNQUYrf+Xcha9uus4ww1kWNZyfXJB87a2kORLiWMfs2IBBJ -TmZ2Fnk0JnHDb8Oknxd9PvJPT0mvyT8DA+KIAPqNvOjUXP4bnjEHJcoCP9S5HkGC -IQIDAQAB ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-6165ee59.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-6165ee59.rsa.pub deleted file mode 100644 index f2165aeb..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-6165ee59.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAutQkua2CAig4VFSJ7v54 -ALyu/J1WB3oni7qwCZD3veURw7HxpNAj9hR+S5N/pNeZgubQvJWyaPuQDm7PTs1+ -tFGiYNfAsiibX6Rv0wci3M+z2XEVAeR9Vzg6v4qoofDyoTbovn2LztaNEjTkB+oK -tlvpNhg1zhou0jDVYFniEXvzjckxswHVb8cT0OMTKHALyLPrPOJzVtM9C1ew2Nnc -3848xLiApMu3NBk0JqfcS3Bo5Y2b1FRVBvdt+2gFoKZix1MnZdAEZ8xQzL/a0YS5 -Hd0wj5+EEKHfOd3A75uPa/WQmA+o0cBFfrzm69QDcSJSwGpzWrD1ScH3AK8nWvoj -v7e9gukK/9yl1b4fQQ00vttwJPSgm9EnfPHLAtgXkRloI27H6/PuLoNvSAMQwuCD -hQRlyGLPBETKkHeodfLoULjhDi1K2gKJTMhtbnUcAA7nEphkMhPWkBpgFdrH+5z4 -Lxy+3ek0cqcI7K68EtrffU8jtUj9LFTUC8dERaIBs7NgQ/LfDbDfGh9g6qVj1hZl -k9aaIPTm/xsi8v3u+0qaq7KzIBc9s59JOoA8TlpOaYdVgSQhHHLBaahOuAigH+VI -isbC9vmqsThF2QdDtQt37keuqoda2E6sL7PUvIyVXDRfwX7uMDjlzTxHTymvq2Ck -htBqojBnThmjJQFgZXocHG8CAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-61666e3f.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-61666e3f.rsa.pub deleted file mode 100644 index aa63d81d..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-61666e3f.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlEyxkHggKCXC2Wf5Mzx4 -nZLFZvU2bgcA3exfNPO/g1YunKfQY+Jg4fr6tJUUTZ3XZUrhmLNWvpvSwDS19ZmC -IXOu0+V94aNgnhMsk9rr59I8qcbsQGIBoHzuAl8NzZCgdbEXkiY90w1skUw8J57z -qCsMBydAueMXuWqF5nGtYbi5vHwK42PffpiZ7G5Kjwn8nYMW5IZdL6ZnMEVJUWC9 -I4waeKg0yskczYDmZUEAtrn3laX9677ToCpiKrvmZYjlGl0BaGp3cxggP2xaDbUq -qfFxWNgvUAb3pXD09JM6Mt6HSIJaFc9vQbrKB9KT515y763j5CC2KUsilszKi3mB -HYe5PoebdjS7D1Oh+tRqfegU2IImzSwW3iwA7PJvefFuc/kNIijfS/gH/cAqAK6z -bhdOtE/zc7TtqW2Wn5Y03jIZdtm12CxSxwgtCF1NPyEWyIxAQUX9ACb3M0FAZ61n -fpPrvwTaIIxxZ01L3IzPLpbc44x/DhJIEU+iDt6IMTrHOphD9MCG4631eIdB0H1b -6zbNX1CXTsafqHRFV9XmYYIeOMggmd90s3xIbEujA6HKNP/gwzO6CDJ+nHFDEqoF -SkxRdTkEqjTjVKieURW7Swv7zpfu5PrsrrkyGnsRrBJJzXlm2FOOxnbI2iSL1B5F -rO5kbUxFeZUIDq+7Yv4kLWcCAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616a9724.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616a9724.rsa.pub deleted file mode 100644 index 59c330e9..00000000 --- 
a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616a9724.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnC+bR4bHf/L6QdU4puhQ -gl1MHePszRC38bzvVFDUJsmCaMCL2suCs2A2yxAgGb9pu9AJYLAmxQC4mM3jNqhg -/E7yuaBbek3O02zN/ctvflJ250wZCy+z0ZGIp1ak6pu1j14IwHokl9j36zNfGtfv -ADVOcdpWITFFlPqwq1qt/H3UsKVmtiF3BNWWTeUEQwKvlU8ymxgS99yn0+4OPyNT -L3EUeS+NQJtDS01unau0t7LnjUXn+XIneWny8bIYOQCuVR6s/gpIGuhBaUqwaJOw -7jkJZYF2Ij7uPb4b5/R3vX2FfxxqEHqssFSg8FFUNTZz3qNZs0CRVyfA972g9WkJ -hPfn31pQYil4QGRibCMIeU27YAEjXoqfJKEPh4UWMQsQLrEfdGfb8VgwrPbniGfU -L3jKJR3VAafL9330iawzVQDlIlwGl6u77gEXMl9K0pfazunYhAp+BMP+9ot5ckK+ -osmrqj11qMESsAj083GeFdfV3pXEIwUytaB0AKEht9DbqUfiE/oeZ/LAXgySMtVC -sbC4ESmgVeY2xSBIJdDyUap7FR49GGrw0W49NUv9gRgQtGGaNVQQO9oGL2PBC41P -iWF9GLoX30HIz1P8PF/cZvicSSPkQf2Z6TV+t0ebdGNS5DjapdnCrq8m9Z0pyKsQ -uxAL2a7zX8l5i1CZh1ycUGsCAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616abc23.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616abc23.rsa.pub deleted file mode 100644 index 915bc566..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616abc23.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0MfCDrhODRCIxR9Dep1s -eXafh5CE5BrF4WbCgCsevyPIdvTeyIaW4vmO3bbG4VzhogDZju+R3IQYFuhoXP5v -Y+zYJGnwrgz3r5wYAvPnLEs1+dtDKYOgJXQj+wLJBW1mzRDL8FoRXOe5iRmn1EFS -wZ1DoUvyu7/J5r0itKicZp3QKED6YoilXed+1vnS4Sk0mzN4smuMR9eO1mMCqNp9 -9KTfRDHTbakIHwasECCXCp50uXdoW6ig/xUAFanpm9LtK6jctNDbXDhQmgvAaLXZ -LvFqoaYJ/CvWkyYCgL6qxvMvVmPoRv7OPcyni4xR/WgWa0MSaEWjgPx3+yj9fiMA -1S02pFWFDOr5OUF/O4YhFJvUCOtVsUPPfA/Lj6faL0h5QI9mQhy5Zb9TTaS9jB6p -Lw7u0dJlrjFedk8KTJdFCcaGYHP6kNPnOxMylcB/5WcztXZVQD5WpCicGNBxCGMm -W64SgrV7M07gQfL/32QLsdqPUf0i8hoVD8wfQ3EpbQzv6Fk1Cn90bZqZafg8XWGY -wddhkXk7egrr23Djv37V2okjzdqoyLBYBxMz63qQzFoAVv5VoY2NDTbXYUYytOvG -GJ1afYDRVWrExCech1mX5ZVUB1br6WM+psFLJFoBFl6mDmiYt0vMYBddKISsvwLl -IJQkzDwtXzT2cSjoj3T5QekCAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616ac3bc.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616ac3bc.rsa.pub deleted file mode 100644 index 1e49d246..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616ac3bc.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvaaoSLab+IluixwKV5Od -0gib2YurjPatGIbn5Ov2DLUFYiebj2oJINXJSwUOO+4WcuHFEqiL/1rya+k5hLZt -hnPL1tn6QD4rESznvGSasRCQNT2vS/oyZbTYJRyAtFkEYLlq0t3S3xBxxHWuvIf0 -qVxVNYpQWyM3N9RIeYBR/euXKJXileSHk/uq1I5wTC0XBIHWcthczGN0m9wBEiWS -0m3cnPk4q0Ea8mUJ91Rqob19qETz6VbSPYYpZk3qOycjKosuwcuzoMpwU8KRiMFd -5LHtX0Hx85ghGsWDVtS0c0+aJa4lOMGvJCAOvDfqvODv7gKlCXUpgumGpLdTmaZ8 -1RwqspAe3IqBcdKTqRD4m2mSg23nVx2FAY3cjFvZQtfooT7q1ItRV5RgH6FhQSl7 -+6YIMJ1Bf8AAlLdRLpg+doOUGcEn+pkDiHFgI8ylH1LKyFKw+eXaAml/7DaWZk1d -dqggwhXOhc/UUZFQuQQ8A8zpA13PcbC05XxN2hyP93tCEtyynMLVPtrRwDnHxFKa -qKzs3rMDXPSXRn3ZZTdKH3069ApkEjQdpcwUh+EmJ1Ve/5cdtzT6kKWCjKBFZP/s -91MlRrX2BTRdHaU5QJkUheUtakwxuHrdah2F94lRmsnQlpPr2YseJu6sIE+Dnx4M -CfhdVbQL2w54R645nlnohu8CAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616adfeb.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616adfeb.rsa.pub deleted file mode 100644 index bb15efe9..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616adfeb.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU 
-pNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X -0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6 -ADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ -raQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy -KLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj -/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H -Kuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs -mFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1 -/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq -XIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4 -VORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616ae350.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616ae350.rsa.pub deleted file mode 100644 index 0ecbccc2..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616ae350.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0 -XkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ -piM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46 -xLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP -ODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM -EyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr -DzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+ -f53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF -HwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk -Ok9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6 -9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT -JZDr++FjKrnnijbyNF8b98UCAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616db30d.rsa.pub b/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616db30d.rsa.pub deleted file mode 100644 index ceffa3ac..00000000 --- a/pmb/data/keys/alpine-devel@lists.alpinelinux.org-616db30d.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0 -lIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm -a6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw -tO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C -US/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP -hP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv -LSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0 -x9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF -wmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG -LPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV -GZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C -8CM1S15HxV78s9dFntEqIokCAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/keys/build.postmarketos.org.rsa.pub b/pmb/data/keys/build.postmarketos.org.rsa.pub deleted file mode 100644 index 227f6680..00000000 --- a/pmb/data/keys/build.postmarketos.org.rsa.pub +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlXE4h6kp8aCzn6BjuOnz -Z1lcFSY+WRZ2vGUb5hEZ+YG75xYZW+yELr8P8+HSUrpBXF/HTQYKH/cNJc5zmOny -EsmcZskIYB0qgZPg02GXBV8VfjL+Es+y166g14IH2YKkdfplqHYVpCmnkmaZXQZu -ZAOEL2hyuzTAYYFhCZWHYYgdWfpnKDDlVcnw1Q3/LhADBlN63CW0UMWc5oJ7MwlY -SKriM60cHvER1YD5bdbGG/JTBFnB4djliLtvRNp+w3emwHmDhFmnYITN2WOHbYpV 
-P+RIfzq0IP978sKGJmd0RPuwl2ruifBYikm/F79Ko8lT2gVE40B7wqbInIGquTr1 -deERIr47jcPRGl+01Svm0SxbXD1/zBNjo4wvMsZEW7Te689mFjxsXHnD7OEUQqh/ -D1DhbnKJwpKwClYOi/aVDyGJunqavSR0QYPqN3nP+uXdBC4wuLI9gRS/yXNaxpMy -9AKwlD3uUhKzmJMJIu3L1/TH/vY9M5xZ2lnFduTZmL/X/4sjhyTb1ycFwU9UIY4C -u2CP/YLqiiquokgzpRfYwJyOP4quBVRC46Tejx5PzTCvnhro7LAzDoS756iBv9E/ -2oxwC5VafGSChO+N8SyEBy532Gs+rcZDwUE7M9Y1GbQHW3ALMTwxstWqZ3GKWKd1 -Cw6JQoywUhR09tFwQrYZao0CAwEAAQ== ------END PUBLIC KEY----- diff --git a/pmb/data/locales b/pmb/data/locales deleted file mode 100644 index 8573d76b..00000000 --- a/pmb/data/locales +++ /dev/null @@ -1,304 +0,0 @@ -C -a_DJ -aa_ER -aa_ET -af_ZA -agr_PE -ak_GH -am_ET -an_ES -anp_IN -ar_AE -ar_BH -ar_DZ -ar_EG -ar_IN -ar_IQ -ar_JO -ar_KW -ar_LB -ar_LY -ar_MA -ar_OM -ar_QA -ar_SA -ar_SD -ar_SS -ar_SY -ar_TN -ar_YE -as_IN -ast_ES -ayc_PE -az_AZ -az_IR -be_BY -bem_ZM -ber_DZ -ber_MA -bg_BG -bhb_IN -bho_IN -bho_NP -bi_VU -bn_BD -bn_IN -bo_CN -bo_IN -br_FR -brx_IN -bs_BA -byn_ER -ca_AD -ca_ES -ca_FR -ca_IT -ce_RU -ch_DE -chr_US -cmn_TW -crh_UA -cs_CZ -csb_PL -cv_RU -cy_GB -da_DK -de_AT -de_BE -de_CH -de_DE -de_IT -de_LI -de_LU -doi_IN -dsb_DE -dv_MV -dz_BT -el_CY -el_GR -en_AG -en_AU -en_BW -en_CA -en_DK -en_GB -en_HK -en_IE -en_IL -en_IN -en_NG -en_NZ -en_PH -en_SC -en_SG -en_US -en_ZA -en_ZM -en_ZW -eo -es_AR -es_BO -es_CL -es_CO -es_CR -es_CU -es_DO -es_EC -es_ES -es_GT -es_HN -es_MX -es_NI -es_PA -es_PE -es_PR -es_PY -es_SV -es_US -es_UY -es_VE -et_EE -eu_ES -fa_IR -ff_SN -fi_FI -fil_PH -fo_FO -fr_BE -fr_CA -fr_CH -fr_FR -fr_LU -fur_IT -fy_DE -fy_NL -ga_IE -gd_GB -gez_ER -gez_ET -gl_ES -gu_IN -gv_GB -ha_NG -hak_TW -he_IL -hi_IN -hif_FJ -hne_IN -hr_HR -hsb_DE -ht_HT -hu_HU -hy_AM -ia_FR -id_ID -ig_NG -ik_CA -is_IS -it_CH -it_IT -iu_CA -ja_JP -ka_GE -kab_DZ -kk_KZ -kl_GL -km_KH -kn_IN -ko_KR -kok_IN -ks_IN -ku_TR -kw_GB -ky_KG -lb_LU -lg_UG -li_BE -li_NL -lij_IT -ln_CD -lo_LA -lt_LT -lv_LV -lzh_TW -mag_IN -mai_IN -mai_NP -mfe_MU -mg_MG -mhr_RU -mi_NZ -miq_NI -mjw_IN -mk_MK -ml_IN -mn_MN -mni_IN -mnw_MM -mr_IN -ms_MY -mt_MT -my_MM -nan_TW -nb_NO -nds_DE -nds_NL -ne_NP -nhn_MX -niu_NU -niu_NZ -nl_AW -nl_BE -nl_NL -nn_NO -nr_ZA -nso_ZA -oc_FR -om_ET -om_KE -or_IN -os_RU -pa_IN -pa_PK -pap_AW -pap_CW -pl_PL -ps_AF -pt_BR -pt_PT -quz_PE -raj_IN -ro_RO -ru_RU -ru_UA -rw_RW -sa_IN -sah_RU -sat_IN -sc_IT -sd_IN -se_NO -sgs_LT -shn_MM -shs_CA -si_LK -sid_ET -sk_SK -sl_SI -sm_WS -so_DJ -so_ET -so_KE -so_SO -sq_AL -sq_MK -sr_ME -sr_RS -ss_ZA -st_ZA -sv_FI -sv_SE -sw_KE -sw_TZ -szl_PL -ta_IN -ta_LK -tcy_IN -te_IN -tg_TJ -th_TH -the_NP -ti_ER -ti_ET -tig_ER -tk_TM -tl_PH -tn_ZA -to_TO -tpi_PG -tr_CY -tr_TR -ts_ZA -tt_RU -ug_CN -uk_UA -unm_US -ur_IN -ur_PK -uz_UZ -ve_ZA -vi_VN -wa_BE -wae_CH -wal_ET -wo_SN -xh_ZA -yi_US -yo_NG -yue_HK -yuw_PG -zh_CN -zh_HK -zh_SG -zh_TW -zu_ZA diff --git a/pmb/data/qemu-user-binfmt.txt b/pmb/data/qemu-user-binfmt.txt deleted file mode 100644 index 431d303d..00000000 --- a/pmb/data/qemu-user-binfmt.txt +++ /dev/null @@ -1,61 +0,0 @@ -# Magic and mask parameters of Linux ELF binaries of various CPU architectures, -# required to configure binfmt_misc to run foreign arch binaries with QEMU. -# Extracted from Debian's QEMU packaging ("binfmt-update-in"). 
-# See also: - - aarch64_magic='\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\xb7\x00' - aarch64_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - alpha_magic='\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x26\x90' - alpha_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - arm_magic='\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x28\x00' - arm_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - armeb_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x28' - armeb_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - cris_magic='\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x4c\x00' - cris_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - hppa_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x0f' - hppa_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - i386_magic='\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x03\x00' - i386_mask='\xff\xff\xff\xff\xff\xfe\xfe\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - m68k_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x04' - m68k_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - microblaze_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xba\xab' - microblaze_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - mips_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x08' - mips_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - mipsel_magic='\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x08\x00' - mipsel_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - mips64_magic='\x7f\x45\x4c\x46\x02\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x08' - mips64_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - mips64el_magic='\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x08\x00' - mips64el_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - ppc_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x14' - ppc_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - ppc64_magic='\x7f\x45\x4c\x46\x02\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x15' - ppc64_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - ppc64abi32_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x15' - ppc64abi32_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - ppc64le_magic='\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x15\x00' - ppc64le_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\x00' - riscv32_magic='\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\xf3\x00' - riscv32_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - 
riscv64_magic='\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\xf3\x00' - riscv64_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - s390x_magic='\x7f\x45\x4c\x46\x02\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x16' - s390x_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - sh4_magic='\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x2a\x00' - sh4_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - sh4eb_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x2a' - sh4eb_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - sparc_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x02' - sparc_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' -sparc32plus_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x12' - sparc32plus_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - sparc64_magic='\x7f\x45\x4c\x46\x02\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x2b' - sparc64_mask='\xff\xff\xff\xff\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' - x86_64_magic='\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x3e\x00' - x86_64_mask='\xff\xff\xff\xff\xff\xfe\xfe\xfc\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - xtensa_magic='\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x5e\x00' - xtensa_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff' - xtensaeb_magic='\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x5e' - xtensaeb_mask='\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff' diff --git a/pmb/export/__init__.py b/pmb/export/__init__.py deleted file mode 100644 index 40656df9..00000000 --- a/pmb/export/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -from pmb.export.frontend import frontend -from pmb.export.odin import odin -from pmb.export.symlinks import symlinks diff --git a/pmb/export/frontend.py b/pmb/export/frontend.py deleted file mode 100644 index 03256e5d..00000000 --- a/pmb/export/frontend.py +++ /dev/null @@ -1,33 +0,0 @@ -import glob -import logging -import os - -import pmb.helpers.run -import pmb.helpers.frontend -import pmb.chroot.initfs -import pmb.export - - -def frontend(args): - # Create the export folder - target = args.export_folder - if not os.path.exists(target): - pmb.helpers.run.user(args, ["mkdir", "-p", target]) - - # Rootfs image note - chroot = args.work + "/chroot_native" - pattern = chroot + "/home/pmos/rootfs/" + args.device + "*.img" - if not glob.glob(pattern): - logging.info("NOTE: To export the rootfs image, run 'pmbootstrap" - " install' first (without the 'disk' parameter).") - - # Rebuild the initramfs, just to make sure (see #69) - flavor = pmb.helpers.frontend._parse_flavor(args, args.autoinstall) - if args.autoinstall: - pmb.chroot.initfs.build(args, flavor, "rootfs_" + args.device) - - # Do the export, print all files - logging.info("Export symlinks to: " + target) - if args.odin_flashable_tar: - pmb.export.odin(args, flavor, target) - pmb.export.symlinks(args, flavor, target) diff --git a/pmb/export/odin.py b/pmb/export/odin.py deleted 
file mode 100644 index 3417344b..00000000 --- a/pmb/export/odin.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os - -import pmb.build -import pmb.chroot.apk -import pmb.config -import pmb.flasher -import pmb.helpers.file - - -def odin(args, flavor, folder): - """ - Create Odin flashable tar file with kernel and initramfs - for devices configured with the flasher method 'heimdall-isorec' - and with boot.img for devices with 'heimdall-bootimg' - """ - pmb.flasher.init(args) - suffix = "rootfs_" + args.device - - # Backwards compatibility with old mkinitfs (pma#660) - suffix_flavor = f"-{flavor}" - pmaports_cfg = pmb.config.pmaports.read_config(args) - if pmaports_cfg.get("supported_mkinitfs_without_flavors", False): - suffix_flavor = "" - - # Validate method - method = args.deviceinfo["flash_method"] - if not method.startswith("heimdall-"): - raise RuntimeError("An odin flashable tar is not supported" - f" for the flash method '{method}' specified" - " in the current configuration." - " Only 'heimdall' methods are supported.") - - # Partitions - partition_kernel = \ - args.deviceinfo["flash_heimdall_partition_kernel"] or "KERNEL" - partition_initfs = \ - args.deviceinfo["flash_heimdall_partition_initfs"] or "RECOVERY" - - # Temporary folder - temp_folder = "/tmp/odin-flashable-tar" - if os.path.exists(f"{args.work}/chroot_native{temp_folder}"): - pmb.chroot.root(args, ["rm", "-rf", temp_folder]) - - # Odin flashable tar generation script - # (because redirecting stdin/stdout is not allowed - # in pmbootstrap's chroot/shell functions for security reasons) - odin_script = f"{args.work}/chroot_rootfs_{args.device}/tmp/_odin.sh" - with open(odin_script, "w") as handle: - odin_kernel_md5 = f"{partition_kernel}.bin.md5" - odin_initfs_md5 = f"{partition_initfs}.bin.md5" - odin_device_tar = f"{args.device}.tar" - odin_device_tar_md5 = f"{args.device}.tar.md5" - - handle.write( - "#!/bin/sh\n" - f"cd {temp_folder}\n") - if method == "heimdall-isorec": - handle.write( - # Kernel: copy and append md5 - f"cp /boot/vmlinuz{suffix_flavor} {odin_kernel_md5}\n" - f"md5sum -t {odin_kernel_md5} >> {odin_kernel_md5}\n" - # Initramfs: recompress with lzop, append md5 - f"gunzip -c /boot/initramfs{suffix_flavor}" - f" | lzop > {odin_initfs_md5}\n" - f"md5sum -t {odin_initfs_md5} >> {odin_initfs_md5}\n") - elif method == "heimdall-bootimg": - handle.write( - # boot.img: copy and append md5 - f"cp /boot/boot.img{suffix_flavor} {odin_kernel_md5}\n" - f"md5sum -t {odin_kernel_md5} >> {odin_kernel_md5}\n") - handle.write( - # Create tar, remove included files and append md5 - f"tar -c -f {odin_device_tar} *.bin.md5\n" - "rm *.bin.md5\n" - f"md5sum -t {odin_device_tar} >> {odin_device_tar}\n" - f"mv {odin_device_tar} {odin_device_tar_md5}\n") - - commands = [["mkdir", "-p", temp_folder], - ["cat", "/tmp/_odin.sh"], # for the log - ["sh", "/tmp/_odin.sh"], - ["rm", "/tmp/_odin.sh"] - ] - for command in commands: - pmb.chroot.root(args, command, suffix) - - # Move Odin flashable tar to native chroot and cleanup temp folder - pmb.chroot.user(args, ["mkdir", "-p", "/home/pmos/rootfs"]) - pmb.chroot.root(args, ["mv", f"/mnt/rootfs_{args.device}{temp_folder}" - f"/{odin_device_tar_md5}", "/home/pmos/rootfs/"]), - pmb.chroot.root(args, ["chown", "pmos:pmos", - f"/home/pmos/rootfs/{odin_device_tar_md5}"]) - pmb.chroot.root(args, ["rmdir", temp_folder], suffix) - - # Create the symlink - file = 
f"{args.work}/chroot_native/home/pmos/rootfs/{odin_device_tar_md5}" - link = f"{folder}/{odin_device_tar_md5}" - pmb.helpers.file.symlink(args, file, link) - - # Display a readable message - msg = f" * {odin_device_tar_md5}" - if method == "heimdall-isorec": - msg += " (Odin flashable file, contains initramfs and kernel)" - elif method == "heimdall-bootimg": - msg += " (Odin flashable file, contains boot.img)" - logging.info(msg) diff --git a/pmb/export/symlinks.py b/pmb/export/symlinks.py deleted file mode 100644 index 04c5a952..00000000 --- a/pmb/export/symlinks.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import glob - -import pmb.build -import pmb.chroot.apk -import pmb.config -import pmb.config.pmaports -import pmb.flasher -import pmb.helpers.file - - -def symlinks(args, flavor, folder): - """ - Create convenience symlinks to the rootfs and boot files. - """ - - # Backwards compatibility with old mkinitfs (pma#660) - suffix = f"-{flavor}" - pmaports_cfg = pmb.config.pmaports.read_config(args) - if pmaports_cfg.get("supported_mkinitfs_without_flavors", False): - suffix = "" - - # File descriptions - info = { - f"boot.img{suffix}": ("Fastboot compatible boot.img file," - " contains initramfs and kernel"), - "dtbo.img": "Fastboot compatible dtbo image", - f"initramfs{suffix}": "Initramfs", - f"initramfs{suffix}-extra": "Extra initramfs files in /boot", - f"uInitrd{suffix}": "Initramfs, legacy u-boot image format", - f"uImage{suffix}": "Kernel, legacy u-boot image format", - f"vmlinuz{suffix}": "Linux kernel", - f"{args.device}.img": "Rootfs with partitions for /boot and /", - f"{args.device}-boot.img": "Boot partition image", - f"{args.device}-root.img": "Root partition image", - f"pmos-{args.device}.zip": "Android recovery flashable zip", - "lk2nd.img": "Secondary Android bootloader", - } - - # Generate a list of patterns - path_native = args.work + "/chroot_native" - path_boot = args.work + "/chroot_rootfs_" + args.device + "/boot" - path_buildroot = args.work + "/chroot_buildroot_" + args.deviceinfo["arch"] - patterns = [f"{path_boot}/boot.img{suffix}", - f"{path_boot}/initramfs{suffix}*", - f"{path_boot}/uInitrd{suffix}", - f"{path_boot}/uImage{suffix}", - f"{path_boot}/vmlinuz{suffix}", - f"{path_boot}/dtbo.img", - f"{path_native}/home/pmos/rootfs/{args.device}.img", - f"{path_native}/home/pmos/rootfs/{args.device}-boot.img", - f"{path_native}/home/pmos/rootfs/{args.device}-root.img", - f"{path_buildroot}/var/lib/postmarketos-android-recovery-" + - f"installer/pmos-{args.device}.zip", - f"{path_boot}/lk2nd.img"] - - # Generate a list of files from the patterns - files = [] - for pattern in patterns: - files += glob.glob(pattern) - - # Iterate through all files - for file in files: - basename = os.path.basename(file) - link = folder + "/" + basename - - # Display a readable message - msg = " * " + basename - if basename in info: - msg += " (" + info[basename] + ")" - logging.info(msg) - - pmb.helpers.file.symlink(args, file, link) diff --git a/pmb/flasher/__init__.py b/pmb/flasher/__init__.py deleted file mode 100644 index 2fd1c1fd..00000000 --- a/pmb/flasher/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -from pmb.flasher.init import init -from pmb.flasher.init import install_depends -from pmb.flasher.run import run -from pmb.flasher.run import check_partition_blacklist -from pmb.flasher.variables import variables -from 
pmb.flasher.frontend import frontend diff --git a/pmb/flasher/frontend.py b/pmb/flasher/frontend.py deleted file mode 100644 index 813c4bd4..00000000 --- a/pmb/flasher/frontend.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os - -import pmb.config -import pmb.flasher -import pmb.install -import pmb.chroot.apk -import pmb.chroot.initfs -import pmb.chroot.other -import pmb.helpers.frontend -import pmb.parse.kconfig - - -def kernel(args): - # Rebuild the initramfs, just to make sure (see #69) - flavor = pmb.helpers.frontend._parse_flavor(args, args.autoinstall) - if args.autoinstall: - pmb.chroot.initfs.build(args, flavor, "rootfs_" + args.device) - - # Check kernel config - pmb.parse.kconfig.check(args, flavor, must_exist=False) - - # Generate the paths and run the flasher - if args.action_flasher == "boot": - logging.info("(native) boot " + flavor + " kernel") - pmb.flasher.run(args, "boot", flavor) - else: - logging.info("(native) flash kernel " + flavor) - pmb.flasher.run(args, "flash_kernel", flavor) - logging.info("You will get an IP automatically assigned to your " - "USB interface shortly.") - logging.info("Then you can connect to your device using ssh after pmOS has" - " booted:") - logging.info("ssh {}@{}".format(args.user, pmb.config.default_ip)) - logging.info("NOTE: If you enabled full disk encryption, you should make" - " sure that osk-sdl has been properly configured for your" - " device") - - -def list_flavors(args): - suffix = "rootfs_" + args.device - logging.info("(" + suffix + ") installed kernel flavors:") - logging.info("* " + pmb.chroot.other.kernel_flavor_installed(args, suffix)) - - -def rootfs(args): - method = args.flash_method or args.deviceinfo["flash_method"] - - # Generate rootfs, install flasher - suffix = ".img" - if pmb.config.flashers.get(method, {}).get("split", False): - suffix = "-root.img" - - img_path = f"{args.work}/chroot_native/home/pmos/rootfs/{args.device}"\ - f"{suffix}" - if not os.path.exists(img_path): - raise RuntimeError("The rootfs has not been generated yet, please run" - " 'pmbootstrap install' first.") - - # Do not flash if using fastboot & image is too large - if method.startswith("fastboot") \ - and args.deviceinfo["flash_fastboot_max_size"]: - img_size = os.path.getsize(img_path) / 1024**2 - max_size = int(args.deviceinfo["flash_fastboot_max_size"]) - if img_size > max_size: - raise RuntimeError("The rootfs is too large for fastboot to" - " flash.") - - # Run the flasher - logging.info("(native) flash rootfs image") - pmb.flasher.run(args, "flash_rootfs") - - -def flash_vbmeta(args): - logging.info("(native) flash vbmeta.img with verity disabled flag") - pmb.flasher.run(args, "flash_vbmeta") - - -def flash_dtbo(args): - logging.info("(native) flash dtbo image") - pmb.flasher.run(args, "flash_dtbo") - - -def list_devices(args): - pmb.flasher.run(args, "list_devices") - - -def sideload(args): - # Install depends - pmb.flasher.install_depends(args) - - # Mount the buildroot - suffix = "buildroot_" + args.deviceinfo["arch"] - mountpoint = "/mnt/" + suffix - pmb.helpers.mount.bind(args, args.work + "/chroot_" + suffix, - args.work + "/chroot_native/" + mountpoint) - - # Missing recovery zip error - zip_path = ("/var/lib/postmarketos-android-recovery-installer/pmos-" + - args.device + ".zip") - if not os.path.exists(args.work + "/chroot_native" + mountpoint + - zip_path): - raise RuntimeError("The recovery zip has not been generated yet," - " please run 
'pmbootstrap install' with the" - " '--android-recovery-zip' parameter first!") - - pmb.flasher.run(args, "sideload") - - -def flash_lk2nd(args): - method = args.flash_method or args.deviceinfo["flash_method"] - if method == "fastboot": - # In the future this could be expanded to use "fastboot flash lk2nd $img" - # which reflashes/updates lk2nd from itself. For now let the user handle this - # manually since supporting the codepath with heimdall requires more effort. - pmb.flasher.init(args) - logging.info("(native) checking current fastboot product") - output = pmb.chroot.root(args, ["fastboot", "getvar", "product"], - output="interactive", output_return=True) - # Variable "product" is e.g. "LK2ND_MSM8974" or "lk2nd-msm8226" depending - # on the lk2nd version. - if "lk2nd" in output.lower(): - raise RuntimeError("You are currently running lk2nd. Please reboot into the regular" - " bootloader mode to re-flash lk2nd.") - - # Get the lk2nd package (which is a dependency of the device package) - device_pkg = f"device-{args.device}" - apkbuild = pmb.helpers.pmaports.get(args, device_pkg) - lk2nd_pkg = None - for dep in apkbuild["depends"]: - if dep.startswith("lk2nd"): - lk2nd_pkg = dep - break - - if not lk2nd_pkg: - raise RuntimeError(f"{device_pkg} does not depend on any lk2nd package") - - suffix = "rootfs_" + args.device - pmb.chroot.apk.install(args, [lk2nd_pkg], suffix) - - logging.info("(native) flash lk2nd image") - pmb.flasher.run(args, "flash_lk2nd") - - -def frontend(args): - action = args.action_flasher - method = args.flash_method or args.deviceinfo["flash_method"] - - if method == "none" and action in ["boot", "flash_kernel", "flash_rootfs", - "flash_lk2nd"]: - logging.info("This device doesn't support any flash method.") - return - - if action in ["boot", "flash_kernel"]: - kernel(args) - elif action == "flash_rootfs": - rootfs(args) - elif action == "flash_vbmeta": - flash_vbmeta(args) - elif action == "flash_dtbo": - flash_dtbo(args) - elif action == "flash_lk2nd": - flash_lk2nd(args) - elif action == "list_flavors": - list_flavors(args) - elif action == "list_devices": - list_devices(args) - elif action == "sideload": - sideload(args) diff --git a/pmb/flasher/init.py b/pmb/flasher/init.py deleted file mode 100644 index 3569bcce..00000000 --- a/pmb/flasher/init.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.chroot.apk -import pmb.config -import pmb.config.pmaports -import pmb.helpers.mount - - -def install_depends(args): - if hasattr(args, 'flash_method'): - method = args.flash_method or args.deviceinfo["flash_method"] - else: - method = args.deviceinfo["flash_method"] - - if method not in pmb.config.flashers: - raise RuntimeError(f"Flash method {method} is not supported by the" - " current configuration. However, adding a new" - " flash method is not that hard, when the flashing" - " application already exists.\n" - "Make sure, it is packaged for Alpine Linux, or" - " package it yourself, and then add it to" - " pmb/config/__init__.py.") - depends = pmb.config.flashers[method]["depends"] - - # Depends for some flash methods may be different for various pmaports - # branches, so read them from pmaports.cfg. 
- if method == "fastboot": - pmaports_cfg = pmb.config.pmaports.read_config(args) - depends = pmaports_cfg.get("supported_fastboot_depends", - "android-tools,avbtool").split(",") - elif method == "heimdall-bootimg": - pmaports_cfg = pmb.config.pmaports.read_config(args) - depends = pmaports_cfg.get("supported_heimdall_depends", - "heimdall,avbtool").split(",") - elif method == "mtkclient": - pmaports_cfg = pmb.config.pmaports.read_config(args) - depends = pmaports_cfg.get("supported_mtkclient_depends", - "mtkclient,android-tools").split(",") - - pmb.chroot.apk.install(args, depends) - - -def init(args): - install_depends(args) - - # Mount folders from host system - for folder in pmb.config.flash_mount_bind: - pmb.helpers.mount.bind(args, folder, args.work + - "/chroot_native" + folder) - - # Mount device chroot inside native chroot (required for kernel/ramdisk) - mountpoint = "/mnt/rootfs_" + args.device - pmb.helpers.mount.bind(args, args.work + "/chroot_rootfs_" + args.device, - args.work + "/chroot_native" + mountpoint) diff --git a/pmb/flasher/run.py b/pmb/flasher/run.py deleted file mode 100644 index ad106f8a..00000000 --- a/pmb/flasher/run.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.flasher -import pmb.chroot.initfs - - -def check_partition_blacklist(args, key, value): - if not key.startswith("$PARTITION_"): - return - - name = args.deviceinfo["name"] - if value in args.deviceinfo["partition_blacklist"].split(","): - raise RuntimeError("'" + value + "'" + " partition is blacklisted " + - "from being flashed! See the " + name + " device " + - "wiki page for more information.") - - -def run(args, action, flavor=None): - pmb.flasher.init(args) - - # Verify action - method = args.flash_method or args.deviceinfo["flash_method"] - cfg = pmb.config.flashers[method] - if action not in cfg["actions"]: - raise RuntimeError("action " + action + " is not" - " configured for method " + method + "!" - " You can use the '--method' option to specify a" - " different flash method. See also:" - " ") - - # Variable setup - vars = pmb.flasher.variables(args, flavor, method) - - # vbmeta flasher requires vbmeta partition to be explicitly specified - if action == "flash_vbmeta" and not vars["$PARTITION_VBMETA"]: - raise RuntimeError("Your device does not have 'vbmeta' partition" - " specified; set" - " 'deviceinfo_flash_fastboot_partition_vbmeta'" - " or 'deviceinfo_flash_heimdall_partition_vbmeta'" - " in deviceinfo file. See also:" - " ") - - # dtbo flasher requires dtbo partition to be explicitly specified - if action == "flash_dtbo" and not vars["$PARTITION_DTBO"]: - raise RuntimeError("Your device does not have 'dtbo' partition" - " specified; set" - " 'deviceinfo_flash_fastboot_partition_dtbo'" - " in deviceinfo file. 
See also:" - " ") - - if args.no_reboot and ("flash" not in action or method != "heimdall-bootimg"): - raise RuntimeError("The '--no-reboot' option is only" - " supported when flashing with heimall-bootimg.") - - if args.resume and ("flash" not in action or method != "heimdall-bootimg"): - raise RuntimeError("The '--resume' option is only" - " supported when flashing with heimall-bootimg.") - - # Run the commands of each action - for command in cfg["actions"][action]: - # Variable replacement - for key, value in vars.items(): - for i in range(len(command)): - if key in command[i]: - if value is None: - raise RuntimeError(f"Variable {key} found in action" - f" {action} for method {method}," - " but the value for this variable" - " is None! Is that missing in your" - " deviceinfo?") - check_partition_blacklist(args, key, value) - command[i] = command[i].replace(key, value) - - # Remove empty strings - command = [x for x in command if x != ''] - # Run the action - pmb.chroot.root(args, command, output="interactive") diff --git a/pmb/flasher/variables.py b/pmb/flasher/variables.py deleted file mode 100644 index 393a1846..00000000 --- a/pmb/flasher/variables.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.config.pmaports - - -def variables(args, flavor, method): - _cmdline = args.deviceinfo["kernel_cmdline"] or "" - if "cmdline" in args and args.cmdline: - _cmdline = args.cmdline - - flash_pagesize = args.deviceinfo['flash_pagesize'] - - # TODO Remove _partition_system deviceinfo support once pmaports has been - # updated and minimum pmbootstrap version bumped. - # See also https://gitlab.com/postmarketOS/pmbootstrap/-/issues/2243 - - if method.startswith("fastboot"): - _partition_kernel = args.deviceinfo["flash_fastboot_partition_kernel"]\ - or "boot" - _partition_rootfs = args.deviceinfo["flash_fastboot_partition_rootfs"]\ - or args.deviceinfo["flash_fastboot_partition_system"] or "userdata" - _partition_vbmeta = args.deviceinfo["flash_fastboot_partition_vbmeta"]\ - or None - _partition_dtbo = args.deviceinfo["flash_fastboot_partition_dtbo"]\ - or None - # Require that the partitions are specified in deviceinfo for now - elif method.startswith("rkdeveloptool"): - _partition_kernel = args.deviceinfo["flash_rk_partition_kernel"]\ - or None - _partition_rootfs = args.deviceinfo["flash_rk_partition_rootfs"]\ - or args.deviceinfo["flash_rk_partition_system"] or None - _partition_vbmeta = None - _partition_dtbo = None - elif method.startswith("mtkclient"): - _partition_kernel = args.deviceinfo["flash_mtkclient_partition_kernel"]\ - or "boot" - _partition_rootfs = args.deviceinfo["flash_mtkclient_partition_rootfs"]\ - or "userdata" - _partition_vbmeta = args.deviceinfo["flash_mtkclient_partition_vbmeta"]\ - or None - _partition_dtbo = args.deviceinfo["flash_mtkclient_partition_dtbo"]\ - or None - else: - _partition_kernel = args.deviceinfo["flash_heimdall_partition_kernel"]\ - or "KERNEL" - _partition_rootfs = args.deviceinfo["flash_heimdall_partition_rootfs"]\ - or args.deviceinfo["flash_heimdall_partition_system"] or "SYSTEM" - _partition_vbmeta = args.deviceinfo["flash_heimdall_partition_vbmeta"]\ - or None - _partition_dtbo = args.deviceinfo["flash_heimdall_partition_dtbo"]\ - or None - - if "partition" in args and args.partition: - # Only one operation is done at same time so it doesn't matter - # sharing the arg - _partition_kernel = args.partition - _partition_rootfs = args.partition - _partition_vbmeta = 
args.partition - _partition_dtbo = args.partition - - _dtb = "" - if args.deviceinfo["append_dtb"] == "true": - _dtb = "-dtb" - - _no_reboot = "" - if args.no_reboot: - _no_reboot = "--no-reboot" - - _resume = "" - if args.resume: - _resume = "--resume" - - vars = { - "$BOOT": "/mnt/rootfs_" + args.device + "/boot", - "$DTB": _dtb, - "$IMAGE_SPLIT_BOOT": "/home/pmos/rootfs/" + args.device + "-boot.img", - "$IMAGE_SPLIT_ROOT": "/home/pmos/rootfs/" + args.device + "-root.img", - "$IMAGE": "/home/pmos/rootfs/" + args.device + ".img", - "$KERNEL_CMDLINE": _cmdline, - "$PARTITION_KERNEL": _partition_kernel, - "$PARTITION_INITFS": args.deviceinfo[ - "flash_heimdall_partition_initfs"] or "RECOVERY", - "$PARTITION_ROOTFS": _partition_rootfs, - "$PARTITION_VBMETA": _partition_vbmeta, - "$PARTITION_DTBO": _partition_dtbo, - "$FLASH_PAGESIZE": flash_pagesize, - "$RECOVERY_ZIP": "/mnt/buildroot_" + args.deviceinfo["arch"] + - "/var/lib/postmarketos-android-recovery-installer" - "/pmos-" + args.device + ".zip", - "$UUU_SCRIPT": "/mnt/rootfs_" + args.deviceinfo["codename"] + - "/usr/share/uuu/flash_script.lst", - "$NO_REBOOT": _no_reboot, - "$RESUME": _resume - } - - # Backwards compatibility with old mkinitfs (pma#660) - pmaports_cfg = pmb.config.pmaports.read_config(args) - if pmaports_cfg.get("supported_mkinitfs_without_flavors", False): - vars["$FLAVOR"] = "" - else: - vars["$FLAVOR"] = f"-{flavor}" if flavor is not None else "-" - - return vars diff --git a/pmb/helpers/__init__.py b/pmb/helpers/__init__.py deleted file mode 100644 index aa2f5ff1..00000000 --- a/pmb/helpers/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later diff --git a/pmb/helpers/apk.py b/pmb/helpers/apk.py deleted file mode 100644 index 774ca0b9..00000000 --- a/pmb/helpers/apk.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright 2023 Johannes Marbach, Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os - -import pmb.chroot.root -import pmb.config.pmaports -import pmb.helpers.cli -import pmb.helpers.run -import pmb.helpers.run_core -import pmb.parse.version - - -def _run(args, command, chroot=False, suffix="native", output="log"): - """ - Run a command. - - :param command: command in list form - :param chroot: whether to run the command inside the chroot or on the host - :param suffix: chroot suffix. Only applies if the "chroot" parameter is - set to True. - - See pmb.helpers.run_core.core() for a detailed description of all other - arguments and the return value. - """ - if chroot: - return pmb.chroot.root(args, command, output=output, suffix=suffix, - disable_timeout=True) - return pmb.helpers.run.root(args, command, output=output) - - -def _prepare_fifo(args, chroot=False, suffix="native"): - """ - Prepare the progress fifo for reading / writing. - - :param chroot: whether to run the command inside the chroot or on the host - :param suffix: chroot suffix. Only applies if the "chroot" parameter is - set to True. 
- :returns: A tuple consisting of the path to the fifo as needed by apk to - write into it (relative to the chroot, if applicable) and the - path of the fifo as needed by cat to read from it (always - relative to the host) - """ - if chroot: - fifo = "/tmp/apk_progress_fifo" - fifo_outside = f"{args.work}/chroot_{suffix}{fifo}" - else: - _run(args, ["mkdir", "-p", f"{args.work}/tmp"]) - fifo = fifo_outside = f"{args.work}/tmp/apk_progress_fifo" - if os.path.exists(fifo_outside): - _run(args, ["rm", "-f", fifo_outside]) - _run(args, ["mkfifo", fifo_outside]) - return (fifo, fifo_outside) - - -def _create_command_with_progress(command, fifo): - """ - Build a full apk command from a subcommand, set up to redirect progress - into a fifo. - - :param command: apk subcommand in list form - :param fifo: path of the fifo - :returns: full command in list form - """ - flags = ["--no-progress", "--progress-fd", "3"] - command_full = [command[0]] + flags + command[1:] - command_flat = pmb.helpers.run_core.flat_cmd(command_full) - command_flat = f"exec 3>{fifo}; {command_flat}" - return ["sh", "-c", command_flat] - - -def _compute_progress(line): - """ - Compute the progress as a number between 0 and 1. - - :param line: line as read from the progress fifo - :returns: progress as a number between 0 and 1 - """ - if not line: - return 1 - cur_tot = line.rstrip().split('/') - if len(cur_tot) != 2: - return 0 - cur = float(cur_tot[0]) - tot = float(cur_tot[1]) - return cur / tot if tot > 0 else 0 - - -def apk_with_progress(args, command, chroot=False, suffix="native"): - """ - Run an apk subcommand while printing a progress bar to STDOUT. - - :param command: apk subcommand in list form - :param chroot: whether to run commands inside the chroot or on the host - :param suffix: chroot suffix. Only applies if the "chroot" parameter is - set to True. - :raises RuntimeError: when the apk command fails - """ - fifo, fifo_outside = _prepare_fifo(args, chroot, suffix) - command_with_progress = _create_command_with_progress(command, fifo) - log_msg = " ".join(command) - with _run(args, ['cat', fifo], chroot=chroot, suffix=suffix, - output="pipe") as p_cat: - with _run(args, command_with_progress, chroot=chroot, suffix=suffix, - output="background") as p_apk: - while p_apk.poll() is None: - line = p_cat.stdout.readline().decode('utf-8') - progress = _compute_progress(line) - pmb.helpers.cli.progress_print(args, progress) - pmb.helpers.cli.progress_flush(args) - pmb.helpers.run_core.check_return_code(args, p_apk.returncode, - log_msg) - - -def check_outdated(args, version_installed, action_msg): - """ - Check if the provided alpine version is outdated, depending on the alpine - mirrordir (edge, v3.12, ...) related to currently checked out pmaports - branch. - - :param version_installed: currently installed apk version, e.g. "2.12.1-r0" - :param action_msg: string explaining what the user should do to resolve - this - :raises: RuntimeError if the version is outdated - """ - channel_cfg = pmb.config.pmaports.read_config_channel(args) - mirrordir_alpine = channel_cfg["mirrordir_alpine"] - version_min = pmb.config.apk_tools_min_version[mirrordir_alpine] - - if pmb.parse.version.compare(version_installed, version_min) >= 0: - return - - raise RuntimeError("Found an outdated version of the 'apk' package" - f" manager ({version_installed}, expected at least:" - f" {version_min}). 
{action_msg}") diff --git a/pmb/helpers/aportupgrade.py b/pmb/helpers/aportupgrade.py deleted file mode 100644 index 4f28659a..00000000 --- a/pmb/helpers/aportupgrade.py +++ /dev/null @@ -1,291 +0,0 @@ -# Copyright 2023 Luca Weiss -# SPDX-License-Identifier: GPL-3.0-or-later -import datetime -import fnmatch -import logging -import os -import re -import urllib -from typing import Optional - -import pmb.helpers.file -import pmb.helpers.http -import pmb.helpers.pmaports - -req_headers = None -req_headers_github = None - -ANITYA_API_BASE = "https://release-monitoring.org/api/v2" -GITHUB_API_BASE = "https://api.github.com" -GITLAB_HOSTS = [ - "https://gitlab.com", - "https://gitlab.freedesktop.org", - "https://gitlab.gnome.org", - "https://invent.kde.org", - "https://source.puri.sm", -] - - -def init_req_headers() -> None: - global req_headers - global req_headers_github - # Only initialize them once - if req_headers is not None and req_headers_github is not None: - return - # Generic request headers - req_headers = { - 'User-Agent': f'pmbootstrap/{pmb.__version__} aportupgrade'} - - # Request headers specific to GitHub - req_headers_github = dict(req_headers) - if os.getenv("GITHUB_TOKEN") is not None: - token = os.getenv("GITHUB_TOKEN") - req_headers_github['Authorization'] = f'token {token}' - else: - logging.info("NOTE: Consider using a GITHUB_TOKEN environment variable" - " to increase your rate limit") - - -def get_package_version_info_github(repo_name: str, ref: Optional[str]): - logging.debug("Trying GitHub repository: {}".format(repo_name)) - - # Get the URL argument to request a special ref, if needed - ref_arg = "" - if ref is not None: - ref_arg = f"?sha={ref}" - - # Get the commits for the repository - commits = pmb.helpers.http.retrieve_json( - f"{GITHUB_API_BASE}/repos/{repo_name}/commits{ref_arg}", - headers=req_headers_github) - latest_commit = commits[0] - commit_date = latest_commit["commit"]["committer"]["date"] - # Extract the time from the field - date = datetime.datetime.strptime(commit_date, "%Y-%m-%dT%H:%M:%SZ") - return { - "sha": latest_commit["sha"], - "date": date, - } - - -def get_package_version_info_gitlab(gitlab_host: str, repo_name: str, - ref: Optional[str]): - logging.debug("Trying GitLab repository: {}".format(repo_name)) - - repo_name_safe = urllib.parse.quote(repo_name, safe='') - - # Get the URL argument to request a special ref, if needed - ref_arg = "" - if ref is not None: - ref_arg = f"?ref_name={ref}" - - # Get the commits for the repository - commits = pmb.helpers.http.retrieve_json( - f"{gitlab_host}/api/v4/projects/{repo_name_safe}/repository" - f"/commits{ref_arg}", - headers=req_headers) - latest_commit = commits[0] - commit_date = latest_commit["committed_date"] - # Extract the time from the field - # 2019-10-14T09:32:00.000Z / 2019-12-27T07:58:53.000-05:00 - date = datetime.datetime.strptime(commit_date, "%Y-%m-%dT%H:%M:%S.000%z") - return { - "sha": latest_commit["id"], - "date": date, - } - - -def upgrade_git_package(args, pkgname: str, package) -> bool: - """ - Update _commit/pkgver/pkgrel in a git-APKBUILD (or pretend to do it if - args.dry is set). - :param pkgname: the package name - :param package: a dict containing package information - :returns: if something (would have) been changed - """ - # Get the wanted source line - source = package["source"][0] - source = re.split(r"::", source) - if 1 <= len(source) <= 2: - source = source[-1] - else: - raise RuntimeError("Unhandled number of source elements. 
Please open" - f" a bug report: {source}") - - verinfo = None - - github_match = re.match( - r"https://github\.com/(.+)/(?:archive|releases)", source) - gitlab_match = re.match( - fr"({'|'.join(GITLAB_HOSTS)})/(.+)/-/archive/", source) - if github_match: - verinfo = get_package_version_info_github( - github_match.group(1), args.ref) - elif gitlab_match: - verinfo = get_package_version_info_gitlab( - gitlab_match.group(1), gitlab_match.group(2), args.ref) - - if verinfo is None: - # ignore for now - logging.warning("{}: source not handled: {}".format(pkgname, source)) - return False - - # Get the new commit sha - sha = package["_commit"] - sha_new = verinfo["sha"] - - # Format the new pkgver, keep the value before _git the same - if package["pkgver"] == "9999": - pkgver = package["_pkgver"] - else: - pkgver = package["pkgver"] - - pkgver_match = re.match(r"([\d.]+)_git", pkgver) - date_pkgver = verinfo["date"].strftime("%Y%m%d") - pkgver_new = f"{pkgver_match.group(1)}_git{date_pkgver}" - - # pkgrel will be zero - pkgrel = int(package["pkgrel"]) - pkgrel_new = 0 - - if sha == sha_new: - logging.info("{}: up-to-date".format(pkgname)) - return False - - logging.info("{}: upgrading pmaport".format(pkgname)) - if args.dry: - logging.info(f" Would change _commit from {sha} to {sha_new}") - logging.info(f" Would change pkgver from {pkgver} to {pkgver_new}") - logging.info(f" Would change pkgrel from {pkgrel} to {pkgrel_new}") - return True - - if package["pkgver"] == "9999": - pmb.helpers.file.replace_apkbuild(args, pkgname, "_pkgver", pkgver_new) - else: - pmb.helpers.file.replace_apkbuild(args, pkgname, "pkgver", pkgver_new) - pmb.helpers.file.replace_apkbuild(args, pkgname, "pkgrel", pkgrel_new) - pmb.helpers.file.replace_apkbuild(args, pkgname, "_commit", sha_new, True) - return True - - -def upgrade_stable_package(args, pkgname: str, package) -> bool: - """ - Update _commit/pkgver/pkgrel in an APKBUILD (or pretend to do it if - args.dry is set). - - :param pkgname: the package name - :param package: a dict containing package information - :returns: if something (would have) been changed - """ - - # Looking up if there's a custom mapping from postmarketOS package name - # to Anitya project name. - mappings = pmb.helpers.http.retrieve_json( - f"{ANITYA_API_BASE}/packages/?distribution=postmarketOS" - f"&name={pkgname}", headers=req_headers) - if mappings["total_items"] < 1: - projects = pmb.helpers.http.retrieve_json( - f"{ANITYA_API_BASE}/projects/?name={pkgname}", headers=req_headers) - if projects["total_items"] < 1: - logging.warning(f"{pkgname}: failed to get Anitya project") - return False - else: - project_name = mappings["items"][0]["project"] - ecosystem = mappings["items"][0]["ecosystem"] - projects = pmb.helpers.http.retrieve_json( - f"{ANITYA_API_BASE}/projects/?name={project_name}&" - f"ecosystem={ecosystem}", - headers=req_headers) - - if projects["total_items"] < 1: - logging.warning(f"{pkgname}: didn't find any projects, can't upgrade!") - return False - if projects["total_items"] > 1: - logging.warning(f"{pkgname}: found more than one project, can't " - f"upgrade! 
Please create an explicit mapping of " - f"\"project\" to the package name.") - return False - - # Get the first, best-matching item - project = projects["items"][0] - - # Check that we got a version number - if len(project["stable_versions"]) < 1: - logging.warning("{}: got no version number, ignoring".format(pkgname)) - return False - - version = project["stable_versions"][0] - - # Compare the pmaports version with the project version - if package["pkgver"] == version: - logging.info("{}: up-to-date".format(pkgname)) - return False - - if package["pkgver"] == "9999": - pkgver = package["_pkgver"] - else: - pkgver = package["pkgver"] - - pkgver_new = version - - pkgrel = package["pkgrel"] - pkgrel_new = 0 - - if not pmb.parse.version.validate(pkgver_new): - logging.warning(f"{pkgname}: would upgrade to invalid pkgver:" - f" {pkgver_new}, ignoring") - return False - - logging.info("{}: upgrading pmaport".format(pkgname)) - if args.dry: - logging.info(f" Would change pkgver from {pkgver} to {pkgver_new}") - logging.info(f" Would change pkgrel from {pkgrel} to {pkgrel_new}") - return True - - if package["pkgver"] == "9999": - pmb.helpers.file.replace_apkbuild(args, pkgname, "_pkgver", pkgver_new) - else: - pmb.helpers.file.replace_apkbuild(args, pkgname, "pkgver", pkgver_new) - - pmb.helpers.file.replace_apkbuild(args, pkgname, "pkgrel", pkgrel_new) - return True - - -def upgrade(args, pkgname, git=True, stable=True) -> bool: - """ - Find new versions of a single package and upgrade it. - - :param pkgname: the name of the package - :param git: True if git packages should be upgraded - :param stable: True if stable packages should be upgraded - :returns: if something (would have) been changed - """ - # Initialize request headers - init_req_headers() - - package = pmb.helpers.pmaports.get(args, pkgname) - # Run the correct function - if "_git" in package["pkgver"]: - if git: - return upgrade_git_package(args, pkgname, package) - else: - if stable: - return upgrade_stable_package(args, pkgname, package) - - -def upgrade_all(args) -> None: - """ - Upgrade all packages, based on args.all, args.all_git and args.all_stable. - """ - for pkgname in pmb.helpers.pmaports.get_list(args): - # Always ignore postmarketOS-specific packages that have no upstream - # source - skip = False - for pattern in pmb.config.upgrade_ignore: - if fnmatch.fnmatch(pkgname, pattern): - skip = True - if skip: - continue - - upgrade(args, pkgname, args.all or args.all_git, - args.all or args.all_stable) diff --git a/pmb/helpers/args.py b/pmb/helpers/args.py deleted file mode 100644 index ea364030..00000000 --- a/pmb/helpers/args.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import copy -import os -import pmb.config -import pmb.helpers.git - -""" This file constructs the args variable, which is passed to almost all - functions in the pmbootstrap code base. Here's a listing of the kind of - information it stores. - - 1. Argparse - Variables directly from command line argument parsing (see - pmb/parse/arguments.py, the "dest" parameter of the add_argument() - calls defines where it is stored in args). - - Examples: - args.action ("zap", "chroot", "build" etc.) - args.as_root (True when --as-root is passed) - ... - - 2. 
Argparse merged with others - Variables from the user's config file (~/.config/pmbootstrap.cfg) that - can be overridden from the command line (pmb/parse/arguments.py) and - fall back to the defaults defined in pmb/config/__init__.py (see - "defaults = {..."). The user's config file gets generated interactively - with "pmbootstrap init". - - Examples: - args.aports ("$WORK/cache_git/pmaports", override with --aports) - args.device ("samsung-i9100", "qemu-amd64" etc.) - args.work ("/home/user/.local/var/pmbootstrap", override with --work) - - 3. Parsed configs - Similar to the cache above, specific config files get parsed and added - to args, so they can get accessed quickly (without parsing the configs - over and over). These configs are not only used in one specific - location, so having a short name for them increases readability of the - code as well. - - Examples: - args.deviceinfo (e.g. {"name": "Mydevice", "arch": "armhf", ...}) -""" - - -def fix_mirrors_postmarketos(args): - """ Fix args.mirrors_postmarketos when it is supposed to be empty or the - default value. - - In pmb/parse/arguments.py, we set the -mp/--mirror-pmOS argument to - action="append" and start off with an empty list. That way, users can - specify multiple custom mirrors by specifying -mp multiple times on the - command line. Here we fix the default and no mirrors case. - - NOTE: we don't use nargs="+", because it does not play nicely with - subparsers: """ - # -mp not specified: use default mirrors - if not args.mirrors_postmarketos: - cfg = pmb.config.load(args) - args.mirrors_postmarketos = \ - cfg["pmbootstrap"]["mirrors_postmarketos"].split(",") - - # -mp="": use no postmarketOS mirrors (build everything locally) - if args.mirrors_postmarketos == [""]: - args.mirrors_postmarketos = [] - - -def check_pmaports_path(args): - """ Make sure that args.aports exists when it was overridden by --aports. - Without this check, 'pmbootstrap init' would start cloning the - pmaports into the default folder when args.aports does not exist. """ - if args.from_argparse.aports and not os.path.exists(args.aports): - raise ValueError("pmaports path (specified with --aports) does" - " not exist: " + args.aports) - - -def replace_placeholders(args): - """ Replace $WORK and ~ (for path variables) in variables from any config - (user's config file, default config settings or config parameters - specified on commandline) """ - - # Replace $WORK - for key, value in pmb.config.defaults.items(): - if key not in args: - continue - old = getattr(args, key) - if isinstance(old, str): - setattr(args, key, old.replace("$WORK", args.work)) - - # Replace ~ (path variables only) - for key in ["aports", "config", "log", "work"]: - if key in args: - setattr(args, key, os.path.expanduser(getattr(args, key))) - - -def add_deviceinfo(args): - """ Add and verify the deviceinfo (only after initialization) """ - setattr(args, "deviceinfo", pmb.parse.deviceinfo(args)) - arch = args.deviceinfo["arch"] - if (arch != pmb.config.arch_native and - arch not in pmb.config.build_device_architectures): - raise ValueError("Arch '" + arch + "' is not available in" - " postmarketOS. 
If you would like to add it, see:" - " ") - - -def init(args): - # Basic initialization - fix_mirrors_postmarketos(args) - pmb.config.merge_with_args(args) - replace_placeholders(args) - pmb.helpers.other.init_cache() - - # Initialize logs (we could raise errors below) - pmb.helpers.logging.init(args) - - # Initialization code which may raise errors - check_pmaports_path(args) - if args.action not in ["init", "config", "bootimg_analyze", "log", - "pull", "shutdown", "zap"]: - pmb.config.pmaports.read_config(args) - add_deviceinfo(args) - pmb.helpers.git.parse_channels_cfg(args) - - return args - - -def update_work(args, work): - """ Update the work path in args.work and wherever $WORK was used. """ - # Start with the unmodified args from argparse - args_new = copy.deepcopy(args.from_argparse) - - # Keep from the modified args: - # * the unmodified args from argparse (to check if --aports was specified) - args_new.from_argparse = args.from_argparse - - # Generate modified args again, replacing $WORK with the new work folder - # When args.log is different, this also opens the log in the new location - args_new.work = work - args_new = pmb.helpers.args.init(args_new) - - # Overwrite old attributes of args with the new attributes - for key in vars(args_new): - setattr(args, key, getattr(args_new, key)) diff --git a/pmb/helpers/cli.py b/pmb/helpers/cli.py deleted file mode 100644 index a9fefe42..00000000 --- a/pmb/helpers/cli.py +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import datetime -import logging -import os -import re -import readline -import sys - -import pmb.config - - -class ReadlineTabCompleter: - """ Stores intermediate state for completer function """ - def __init__(self, options): - """ - :param options: list of possible completions - """ - self.options = sorted(options) - self.matches = [] - - def completer_func(self, input_text, iteration): - """ - :param input_text: text that shall be autocompleted - :param iteration: how many times "tab" was hit - """ - # First time: build match list - if iteration == 0: - if input_text: - self.matches = [s for s in self.options - if s and s.startswith(input_text)] - else: - self.matches = self.options[:] - - # Return the N'th item from the match list, if we have that many. - if iteration < len(self.matches): - return self.matches[iteration] - return None - - -def ask(question="Continue?", choices=["y", "n"], default="n", - lowercase_answer=True, validation_regex=None, complete=None): - """ - Ask a question on the terminal. 
- :param question: display prompt - :param choices: short list of possible answers, - displayed after prompt if set - :param default: default value to return if user doesn't input anything - :param lowercase_answer: if True, convert return value to lower case - :param validation_regex: if set, keep asking until regex matches - :param complete: set to a list to enable tab completion - """ - styles = pmb.config.styles - - while True: - date = datetime.datetime.now().strftime("%H:%M:%S") - line = question - if choices: - line += f" ({str.join('/', choices)})" - if default: - line += f" [{default}]" - line_color = f"[{date}] {styles['BOLD']}{line}{styles['END']}" - line = f"[{date}] {line}" - - if complete: - readline.parse_and_bind('tab: complete') - delims = readline.get_completer_delims() - if '-' in delims: - delims = delims.replace('-', '') - readline.set_completer_delims(delims) - readline.set_completer( - ReadlineTabCompleter(complete).completer_func) - - ret = input(f"{line_color}: ") - - # Stop completing (question is answered) - if complete: - # set_completer(None) would use the default file system completer - readline.set_completer(lambda text, state: None) - - if lowercase_answer: - ret = ret.lower() - if ret == "": - ret = str(default) - - pmb.helpers.logging.logfd.write(f"{line}: {ret}\n") - pmb.helpers.logging.logfd.flush() - - # Validate with regex - if not validation_regex: - return ret - - pattern = re.compile(validation_regex) - if pattern.match(ret): - return ret - - logging.fatal("ERROR: Input did not pass validation (regex: " + - validation_regex + "). Please try again.") - - -def confirm(args, question="Continue?", default=False, no_assumptions=False): - """ - Convenience wrapper around ask for simple yes-no questions with validation. - - :param no_assumptions: ask for confirmation, even if "pmbootstrap -y' - is set - :returns: True for "y", False for "n" - """ - default_str = "y" if default else "n" - if args.assume_yes and not no_assumptions: - logging.info(question + " (y/n) [" + default_str + "]: y") - return True - answer = ask(question, ["y", "n"], default_str, True, "(y|n)") - return answer == "y" - - -def progress_print(args, progress): - """ - Print a snapshot of a progress bar to STDOUT. Call progress_flush to end - printing progress and clear the line. No output is printed in - non-interactive mode. - - :param progress: completion percentage as a number between 0 and 1 - """ - width = 79 - try: - width = os.get_terminal_size().columns - 6 - except OSError: - pass - chars = int(width * progress) - filled = "\u2588" * chars - empty = " " * (width - chars) - percent = int(progress * 100) - if pmb.config.is_interactive and not args.details_to_stdout: - sys.stdout.write(f"\u001b7{percent:>3}% {filled}{empty}") - sys.stdout.flush() - sys.stdout.write("\u001b8\u001b[0K") - - -def progress_flush(args): - """ - Finish printing a progress bar. This will erase the line. Does nothing in - non-interactive mode. - """ - if pmb.config.is_interactive and not args.details_to_stdout: - sys.stdout.flush() diff --git a/pmb/helpers/devices.py b/pmb/helpers/devices.py deleted file mode 100644 index b0583047..00000000 --- a/pmb/helpers/devices.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import glob -import pmb.parse - - -def find_path(args, codename, file=''): - """ - Find path to device APKBUILD under `device/*/device-`. - :param codename: device codename - :param file: file to look for (e.g. 
APKBUILD or deviceinfo), may be empty - :returns: path to APKBUILD - """ - g = glob.glob(args.aports + "/device/*/device-" + codename + '/' + file) - if not g: - return None - - if len(g) != 1: - raise RuntimeError(codename + " found multiple times in the device" - " subdirectory of pmaports") - - return g[0] - - -def list_codenames(args, vendor=None, unmaintained=True): - """ - Get all devices, for which aports are available - :param vendor: vendor name to choose devices from, or None for all vendors - :param unmaintained: include unmaintained devices - :returns: ["first-device", "second-device", ...] - """ - ret = [] - for path in glob.glob(args.aports + "/device/*/device-*"): - if not unmaintained and '/unmaintained/' in path: - continue - device = os.path.basename(path).split("-", 1)[1] - if (vendor is None) or device.startswith(vendor + '-'): - ret.append(device) - return ret - - -def list_vendors(args): - """ - Get all device vendors, for which aports are available - :returns: {"vendor1", "vendor2", ...} - """ - ret = set() - for path in glob.glob(args.aports + "/device/*/device-*"): - vendor = os.path.basename(path).split("-", 2)[1] - ret.add(vendor) - return ret - - -def list_apkbuilds(args): - """ - :returns: { "first-device": {"pkgname": ..., "pkgver": ...}, ... } - """ - ret = {} - for device in list_codenames(args): - apkbuild_path = f"{args.aports}/device/*/device-{device}/APKBUILD" - ret[device] = pmb.parse.apkbuild(apkbuild_path) - return ret - - -def list_deviceinfos(args): - """ - :returns: { "first-device": {"name": ..., "screen_width": ...}, ... } - """ - ret = {} - for device in list_codenames(args): - ret[device] = pmb.parse.deviceinfo(args, device) - return ret diff --git a/pmb/helpers/file.py b/pmb/helpers/file.py deleted file mode 100644 index c6a0ec4a..00000000 --- a/pmb/helpers/file.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later - -import os -import time - -import pmb.helpers.run - - -def replace(path, old, new): - text = "" - with open(path, "r", encoding="utf-8") as handle: - text = handle.read() - - text = text.replace(old, new) - - with open(path, "w", encoding="utf-8") as handle: - handle.write(text) - - -def replace_apkbuild(args, pkgname, key, new, in_quotes=False): - """ Replace one key=value line in an APKBUILD and verify it afterwards. - :param pkgname: package name, e.g. "hello-world" - :param key: key that should be replaced, e.g. "pkgver" - :param new: new value - :param in_quotes: expect the value to be in quotation marks ("") """ - # Read old value - path = pmb.helpers.pmaports.find(args, pkgname) + "/APKBUILD" - apkbuild = pmb.parse.apkbuild(path) - old = apkbuild[key] - - # Prepare old/new strings - if in_quotes: - line_old = '{}="{}"'.format(key, old) - line_new = '{}="{}"'.format(key, new) - else: - line_old = '{}={}'.format(key, old) - line_new = '{}={}'.format(key, new) - - # Replace - replace(path, "\n" + line_old + "\n", "\n" + line_new + "\n") - - # Verify - del (pmb.helpers.other.cache["apkbuild"][path]) - apkbuild = pmb.parse.apkbuild(path) - if apkbuild[key] != str(new): - raise RuntimeError("Failed to set '{}' for pmaport '{}'. Make sure" - " that there's a line with exactly the string '{}'" - " and nothing else in: {}".format(key, pkgname, - line_old, path)) - - -def is_up_to_date(path_sources, path_target=None, lastmod_target=None): - """ - Check if a file is up-to-date by comparing the last modified timestamps - (just like make does it). 
- - :param path_sources: list of full paths to the source files - :param path_target: full path to the target file - :param lastmod_target: the timestamp of the target file. specify this as - alternative to specifying path_target. - """ - - if path_target and lastmod_target: - raise RuntimeError( - "Specify path_target *or* lastmod_target, not both!") - - lastmod_source = None - for path_source in path_sources: - lastmod = os.path.getmtime(path_source) - if not lastmod_source or lastmod > lastmod_source: - lastmod_source = lastmod - - if path_target: - lastmod_target = os.path.getmtime(path_target) - - return lastmod_target >= lastmod_source - - -def is_older_than(path, seconds): - """ - Check if a single file is older than a given amount of seconds. - """ - if not os.path.exists(path): - return True - lastmod = os.path.getmtime(path) - return lastmod + seconds < time.time() - - -def symlink(args, file, link): - """ - Checks if the symlink is already present, otherwise create it. - """ - if os.path.exists(link): - if (os.path.islink(link) and - os.path.realpath(os.readlink(link)) == os.path.realpath(file)): - return - raise RuntimeError("File exists: " + link) - elif os.path.islink(link): - os.unlink(link) - - # Create the symlink - pmb.helpers.run.user(args, ["ln", "-s", file, link]) diff --git a/pmb/helpers/frontend.py b/pmb/helpers/frontend.py deleted file mode 100644 index 5bbf863c..00000000 --- a/pmb/helpers/frontend.py +++ /dev/null @@ -1,653 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import json -import logging -import os -import sys - -import pmb.aportgen -import pmb.build -import pmb.build.autodetect -import pmb.chroot -import pmb.chroot.initfs -import pmb.chroot.other -import pmb.ci -import pmb.config -import pmb.export -import pmb.flasher -import pmb.helpers.aportupgrade -import pmb.helpers.devices -import pmb.helpers.git -import pmb.helpers.lint -import pmb.helpers.logging -import pmb.helpers.pkgrel_bump -import pmb.helpers.pmaports -import pmb.helpers.repo -import pmb.helpers.repo_missing -import pmb.helpers.run -import pmb.helpers.status -import pmb.install -import pmb.install.blockdevice -import pmb.netboot -import pmb.parse -import pmb.qemu -import pmb.sideload - - -def _parse_flavor(args, autoinstall=True): - """ - Verify the flavor argument if specified, or return a default value. - :param autoinstall: make sure that at least one kernel flavor is installed - """ - # Install a kernel and get its "flavor", where flavor is a pmOS-specific - # identifier that is typically in the form - # "postmarketos--", e.g. - # "postmarketos-qcom-sdm845" - suffix = "rootfs_" + args.device - flavor = pmb.chroot.other.kernel_flavor_installed( - args, suffix, autoinstall) - - if not flavor: - raise RuntimeError( - "No kernel flavors installed in chroot " + suffix + "! 
Please let" - " your device package depend on a package starting with 'linux-'.") - return flavor - - -def _parse_suffix(args): - if "rootfs" in args and args.rootfs: - return "rootfs_" + args.device - elif args.buildroot: - if args.buildroot == "device": - return "buildroot_" + args.deviceinfo["arch"] - else: - return "buildroot_" + args.buildroot - elif args.suffix: - return args.suffix - else: - return "native" - - -def _install_ondev_verify_no_rootfs(args): - chroot_dest = "/var/lib/rootfs.img" - dest = f"{args.work}/chroot_installer_{args.device}{chroot_dest}" - if os.path.exists(dest): - return - - if args.ondev_cp: - for _, chroot_dest_cp in args.ondev_cp: - if chroot_dest_cp == chroot_dest: - return - - raise ValueError(f"--no-rootfs set, but rootfs.img not found in install" - " chroot. Either run 'pmbootstrap install' without" - " --no-rootfs first to let it generate the postmarketOS" - " rootfs once, or supply a rootfs file with:" - f" --cp os.img:{chroot_dest}") - - -def aportgen(args): - for package in args.packages: - logging.info("Generate aport: " + package) - pmb.aportgen.generate(args, package) - - -def build(args): - # Strict mode: zap everything - if args.strict: - pmb.chroot.zap(args, False) - - if args.envkernel: - pmb.build.envkernel.package_kernel(args) - return - - # Set src and force - src = os.path.realpath(os.path.expanduser(args.src[0])) \ - if args.src else None - force = True if src else args.force - if src and not os.path.exists(src): - raise RuntimeError("Invalid path specified for --src: " + src) - - # Build all packages - for package in args.packages: - arch_package = args.arch or pmb.build.autodetect.arch(args, package) - if not pmb.build.package(args, package, arch_package, force, - args.strict, src=src): - logging.info("NOTE: Package '" + package + "' is up to date. 
Use" - " 'pmbootstrap build " + package + " --force'" - " if needed.") - - -def build_init(args): - suffix = _parse_suffix(args) - pmb.build.init(args, suffix) - - -def checksum(args): - for package in args.packages: - if args.verify: - pmb.build.checksum.verify(args, package) - else: - pmb.build.checksum.update(args, package) - - -def sideload(args): - arch = args.deviceinfo["arch"] - if args.arch: - arch = args.arch - user = args.user - host = args.host - pmb.sideload.sideload(args, user, host, args.port, arch, args.install_key, - args.packages) - - -def netboot(args): - if args.action_netboot == "serve": - pmb.netboot.start_nbd_server(args) - - -def chroot(args): - # Suffix - suffix = _parse_suffix(args) - if (args.user and suffix != "native" and - not suffix.startswith("buildroot_")): - raise RuntimeError("--user is only supported for native or" - " buildroot_* chroots.") - if args.xauth and suffix != "native": - raise RuntimeError("--xauth is only supported for native chroot.") - - # apk: check minimum version, install packages - pmb.chroot.apk.check_min_version(args, suffix) - if args.add: - pmb.chroot.apk.install(args, args.add.split(","), suffix) - - # Xauthority - env = {} - if args.xauth: - pmb.chroot.other.copy_xauthority(args) - env["DISPLAY"] = os.environ.get("DISPLAY") - env["XAUTHORITY"] = "/home/pmos/.Xauthority" - - # Install blockdevice - if args.install_blockdev: - size_boot = 128 # 128 MiB - size_root = 4096 # 4 GiB - size_reserve = 2048 # 2 GiB - pmb.install.blockdevice.create_and_mount_image(args, size_boot, - size_root, size_reserve) - - # Run the command as user/root - if args.user: - logging.info("(" + suffix + ") % su pmos -c '" + - " ".join(args.command) + "'") - pmb.chroot.user(args, args.command, suffix, output=args.output, - env=env) - else: - logging.info("(" + suffix + ") % " + " ".join(args.command)) - pmb.chroot.root(args, args.command, suffix, output=args.output, - env=env) - - -def config(args): - keys = pmb.config.config_keys - if args.name and args.name not in keys: - logging.info("NOTE: Valid config keys: " + ", ".join(keys)) - raise RuntimeError("Invalid config key: " + args.name) - - cfg = pmb.config.load(args) - if args.reset: - if args.name is None: - raise RuntimeError("config --reset requires a name to be given.") - value = pmb.config.defaults[args.name] - cfg["pmbootstrap"][args.name] = value - logging.info(f"Config changed to default: {args.name}='{value}'") - pmb.config.save(args, cfg) - elif args.value is not None: - cfg["pmbootstrap"][args.name] = args.value - logging.info("Config changed: " + args.name + "='" + args.value + "'") - pmb.config.save(args, cfg) - elif args.name: - value = cfg["pmbootstrap"].get(args.name, "") - print(value) - else: - cfg.write(sys.stdout) - - # Don't write the "Done" message - pmb.helpers.logging.disable() - - -def repo_missing(args): - missing = pmb.helpers.repo_missing.generate(args, args.arch, args.overview, - args.package, args.built) - print(json.dumps(missing, indent=4)) - - -def index(args): - pmb.build.index_repo(args) - - -def initfs(args): - pmb.chroot.initfs.frontend(args) - - -def install(args): - if args.no_fde: - logging.warning("WARNING: --no-fde is deprecated," - " as it is now the default.") - if args.rsync and args.full_disk_encryption: - raise ValueError("Installation using rsync is not compatible with full" - " disk encryption.") - if args.rsync and not args.disk: - raise ValueError("Installation using rsync only works with --disk.") - - # On-device installer checks - # Note that this can't 
be in the mutually exclusive group that has most of - # the conflicting options, because then it would not work with --disk. - if args.on_device_installer: - if args.full_disk_encryption: - raise ValueError("--on-device-installer cannot be combined with" - " --fde. The user can choose to encrypt their" - " installation later in the on-device installer.") - if args.android_recovery_zip: - raise ValueError("--on-device-installer cannot be combined with" - " --android-recovery-zip (patches welcome)") - if args.no_image: - raise ValueError("--on-device-installer cannot be combined with" - " --no-image") - if args.rsync: - raise ValueError("--on-device-installer cannot be combined with" - " --rsync") - if args.filesystem: - raise ValueError("--on-device-installer cannot be combined with" - " --filesystem") - - if args.deviceinfo["cgpt_kpart"]: - raise ValueError("--on-device-installer cannot be used with" - " ChromeOS devices") - else: - if args.ondev_cp: - raise ValueError("--cp can only be combined with --ondev") - if args.ondev_no_rootfs: - raise ValueError("--no-rootfs can only be combined with --ondev." - " Do you mean --no-image?") - if args.ondev_no_rootfs: - _install_ondev_verify_no_rootfs(args) - - # On-device installer overrides - if args.on_device_installer: - # To make code for the on-device installer not needlessly complex, just - # hardcode "user" as username here. (The on-device installer will set - # a password for the user, disable SSH password authentication, - # optionally add a new user for SSH that must not have the same - # username etc.) - if args.user != "user": - logging.warning(f"WARNING: custom username '{args.user}' will be" - " replaced with 'user' for the on-device" - " installer.") - args.user = "user" - - if not args.disk and args.split is None: - # Default to split if the flash method requires it - flasher = pmb.config.flashers.get(args.deviceinfo["flash_method"], {}) - if flasher.get("split", False): - args.split = True - - # Android recovery zip related - if args.android_recovery_zip and args.filesystem: - raise ValueError("--android-recovery-zip cannot be combined with" - " --filesystem (patches welcome)") - if args.android_recovery_zip and args.full_disk_encryption: - logging.info("WARNING: --fde is rarely used in combination with" - " --android-recovery-zip. If this does not work, consider" - " using another method (e.g. installing via netcat)") - logging.info("WARNING: the kernel of the recovery system (e.g. TWRP)" - f" must support the cryptsetup cipher '{args.cipher}'.") - logging.info("If you know what you are doing, consider setting a" - " different cipher with 'pmbootstrap install --cipher=..." - " --fde --android-recovery-zip'.") - - # Don't install locally compiled packages and package signing keys - if not args.install_local_pkgs: - # Implies that we don't build outdated packages (overriding the answer - # in 'pmbootstrap init') - args.build_pkgs_on_install = False - - # Safest way to avoid installing local packages is having none - if glob.glob(f"{args.work}/packages/*"): - raise ValueError("--no-local-pkgs specified, but locally built" - " packages found. 
Consider 'pmbootstrap zap -p'" - " to delete them.") - - # Verify that the root filesystem is supported by current pmaports branch - pmb.install.get_root_filesystem(args) - - pmb.install.install(args) - - -def flasher(args): - pmb.flasher.frontend(args) - - -def export(args): - pmb.export.frontend(args) - - -def update(args): - existing_only = not args.non_existing - if not pmb.helpers.repo.update(args, args.arch, True, existing_only): - logging.info("No APKINDEX files exist, so none have been updated." - " The pmbootstrap command downloads the APKINDEX files on" - " demand.") - logging.info("If you want to force downloading the APKINDEX files for" - " all architectures (not recommended), use:" - " pmbootstrap update --non-existing") - - -def newapkbuild(args): - # Check for SRCURL usage - is_url = False - for prefix in ["http://", "https://", "ftp://"]: - if args.pkgname_pkgver_srcurl.startswith(prefix): - is_url = True - break - - # Sanity check: -n is only allowed with SRCURL - if args.pkgname and not is_url: - raise RuntimeError("You can only specify a pkgname (-n) when using" - " SRCURL as last parameter.") - - # Passthrough: Strings (e.g. -d "my description") - pass_through = [] - for entry in pmb.config.newapkbuild_arguments_strings: - value = getattr(args, entry[1]) - if value: - pass_through += [entry[0], value] - - # Passthrough: Switches (e.g. -C for CMake) - for entry in (pmb.config.newapkbuild_arguments_switches_pkgtypes + - pmb.config.newapkbuild_arguments_switches_other): - if getattr(args, entry[1]) is True: - pass_through.append(entry[0]) - - # Passthrough: PKGNAME[-PKGVER] | SRCURL - pass_through.append(args.pkgname_pkgver_srcurl) - pmb.build.newapkbuild(args, args.folder, pass_through, args.force) - - -def kconfig(args): - if args.action_kconfig == "check": - details = args.kconfig_check_details - # Build the components list from cli arguments (--waydroid etc.) 
- components_list = [] - for name in pmb.parse.kconfig.get_all_component_names(): - if getattr(args, f"kconfig_check_{name}"): - components_list += [name] - - # Handle passing a file directly - if args.file: - if pmb.parse.kconfig.check_file(args.file, components_list, - details=details): - logging.info("kconfig check succeeded!") - return - raise RuntimeError("kconfig check failed!") - - # Default to all kernel packages - packages = args.package - if not args.package: - for aport in pmb.helpers.pmaports.get_list(args): - if aport.startswith("linux-"): - packages.append(aport.split("linux-")[1]) - - # Iterate over all kernels - error = False - skipped = 0 - packages.sort() - for package in packages: - if not args.force: - pkgname = package if package.startswith("linux-") \ - else "linux-" + package - aport = pmb.helpers.pmaports.find(args, pkgname) - apkbuild = pmb.parse.apkbuild(f"{aport}/APKBUILD") - if "!pmb:kconfigcheck" in apkbuild["options"]: - skipped += 1 - continue - if not pmb.parse.kconfig.check(args, package, components_list, - details=details): - error = True - - # At least one failure - if error: - raise RuntimeError("kconfig check failed!") - else: - if skipped: - logging.info("NOTE: " + str(skipped) + " kernel(s) was skipped" - " (consider 'pmbootstrap kconfig check -f')") - logging.info("kconfig check succeeded!") - elif args.action_kconfig in ["edit", "migrate"]: - if args.package: - pkgname = args.package - else: - pkgname = args.deviceinfo["codename"] - use_oldconfig = args.action_kconfig == "migrate" - pmb.build.menuconfig(args, pkgname, use_oldconfig) - - -def deviceinfo_parse(args): - # Default to all devices - devices = args.devices - if not devices: - devices = pmb.helpers.devices.list_codenames(args) - - # Iterate over all devices - kernel = args.deviceinfo_parse_kernel - for device in devices: - print(f"{device}, with kernel={kernel}:") - print(json.dumps(pmb.parse.deviceinfo(args, device, kernel), indent=4, - sort_keys=True)) - - -def apkbuild_parse(args): - # Default to all packages - packages = args.packages - if not packages: - packages = pmb.helpers.pmaports.get_list(args) - - # Iterate over all packages - for package in packages: - print(package + ":") - aport = pmb.helpers.pmaports.find(args, package) - path = aport + "/APKBUILD" - print(json.dumps(pmb.parse.apkbuild(path), indent=4, - sort_keys=True)) - - -def apkindex_parse(args): - result = pmb.parse.apkindex.parse(args.apkindex_path) - if args.package: - if args.package not in result: - raise RuntimeError("Package not found in the APKINDEX: " + - args.package) - result = result[args.package] - print(json.dumps(result, indent=4)) - - -def pkgrel_bump(args): - would_bump = True - if args.auto: - would_bump = pmb.helpers.pkgrel_bump.auto(args, args.dry) - else: - # Each package must exist - for package in args.packages: - pmb.helpers.pmaports.find(args, package) - - # Increase pkgrel - for package in args.packages: - pmb.helpers.pkgrel_bump.package(args, package, dry=args.dry) - - if args.dry and would_bump: - logging.info("Pkgrels of package(s) would have been bumped!") - sys.exit(1) - - -def aportupgrade(args): - if args.all or args.all_stable or args.all_git: - pmb.helpers.aportupgrade.upgrade_all(args) - else: - # Each package must exist - for package in args.packages: - pmb.helpers.pmaports.find(args, package) - - # Check each package for a new version - for package in args.packages: - pmb.helpers.aportupgrade.upgrade(args, package) - - -def qemu(args): - pmb.qemu.run(args) - - -def shutdown(args): - 
pmb.chroot.shutdown(args) - - -def stats(args): - # Chroot suffix - suffix = "native" - if args.arch != pmb.config.arch_native: - suffix = "buildroot_" + args.arch - - # Install ccache and display stats - pmb.chroot.apk.install(args, ["ccache"], suffix) - logging.info("(" + suffix + ") % ccache -s") - pmb.chroot.user(args, ["ccache", "-s"], suffix, output="stdout") - - -def work_migrate(args): - # do nothing (pmb/__init__.py already did the migration) - pmb.helpers.logging.disable() - - -def log(args): - log_testsuite = f"{args.work}/log_testsuite.txt" - - if args.clear_log: - pmb.helpers.run.user(args, ["truncate", "-s", "0", args.log]) - pmb.helpers.run.user(args, ["truncate", "-s", "0", log_testsuite]) - - cmd = ["tail", "-n", args.lines, "-F"] - - # Follow the testsuite's log file too if it exists. It will be created when - # starting a test case that writes to it (git -C test grep log_testsuite). - if os.path.exists(log_testsuite): - cmd += [log_testsuite] - - # tail writes the last lines of the files to the terminal. Put the regular - # log at the end, so that output is visible at the bottom (where the user - # looks for an error / what's currently going on). - cmd += [args.log] - - pmb.helpers.run.user(args, cmd, output="tui") - - -def zap(args): - pmb.chroot.zap(args, dry=args.dry, http=args.http, - distfiles=args.distfiles, pkgs_local=args.pkgs_local, - pkgs_local_mismatch=args.pkgs_local_mismatch, - pkgs_online_mismatch=args.pkgs_online_mismatch, - rust=args.rust, netboot=args.netboot) - - # Don't write the "Done" message - pmb.helpers.logging.disable() - - -def bootimg_analyze(args): - bootimg = pmb.parse.bootimg(args, args.path) - tmp_output = "Put these variables in the deviceinfo file of your device:\n" - for line in pmb.aportgen.device.\ - generate_deviceinfo_fastboot_content(bootimg).split("\n"): - tmp_output += "\n" + line.lstrip() - logging.info(tmp_output) - - -def pull(args): - failed = [] - for repo in pmb.config.git_repos.keys(): - if pmb.helpers.git.pull(args, repo) < 0: - failed.append(repo) - - if not failed: - return True - - logging.info("---") - logging.info("WARNING: failed to update: " + ", ".join(failed)) - logging.info("") - logging.info("'pmbootstrap pull' will only update the repositories, if:") - logging.info("* they are on an officially supported branch (e.g. master)") - logging.info("* the history is not conflicting (fast-forward is possible)") - logging.info("* the git workdirs are clean") - logging.info("You have changed mentioned repositories, so they don't meet") - logging.info("these conditions anymore.") - logging.info("") - logging.info("Fix and try again:") - for name_repo in failed: - logging.info("* " + pmb.helpers.git.get_path(args, name_repo)) - logging.info("---") - return False - - -def lint(args): - packages = args.packages - if not packages: - packages = pmb.helpers.pmaports.get_list(args) - - pmb.helpers.lint.check(args, packages) - - -def status(args): - if not pmb.helpers.status.print_status(args, args.details): - sys.exit(1) - - -def ci(args): - topdir = pmb.helpers.git.get_topdir(args, os.getcwd()) - if not os.path.exists(topdir): - logging.error("ERROR: change your current directory to a git" - " repository (e.g. 
pmbootstrap, pmaports) before running" - " 'pmbootstrap ci'.") - exit(1) - - scripts_available = pmb.ci.get_ci_scripts(topdir) - scripts_available = pmb.ci.sort_scripts_by_speed(scripts_available) - if not scripts_available: - logging.error("ERROR: no supported CI scripts found in current git" - " repository, see https://postmarketos.org/pmb-ci") - exit(1) - - scripts_selected = {} - if args.scripts: - if args.all: - raise RuntimeError("Combining --all with script names doesn't" - " make sense") - for script in args.scripts: - if script not in scripts_available: - logging.error(f"ERROR: script '{script}' not found in git" - " repository, found these:" - f" {', '.join(scripts_available.keys())}") - exit(1) - scripts_selected[script] = scripts_available[script] - elif args.all: - scripts_selected = scripts_available - - if args.fast: - for script, script_data in scripts_available.items(): - if "slow" not in script_data["options"]: - scripts_selected[script] = script_data - - if not pmb.helpers.git.clean_worktree(args, topdir): - logging.warning("WARNING: this git repository has uncommitted changes") - - if not scripts_selected: - scripts_selected = pmb.ci.ask_which_scripts_to_run(scripts_available) - - pmb.ci.run_scripts(args, topdir, scripts_selected) diff --git a/pmb/helpers/git.py b/pmb/helpers/git.py deleted file mode 100644 index f04901b7..00000000 --- a/pmb/helpers/git.py +++ /dev/null @@ -1,274 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import configparser -import logging -import os -import time - -import pmb.build -import pmb.chroot.apk -import pmb.config -import pmb.helpers.pmaports -import pmb.helpers.run - - -def get_path(args, name_repo): - """ Get the path to the repository, which is either the default one in the - work dir, or a user-specified one in args. - - :returns: full path to repository """ - if name_repo == "pmaports": - return args.aports - return args.work + "/cache_git/" + name_repo - - -def clone(args, name_repo): - """ Clone a git repository to $WORK/cache_git/$name_repo (or to the - overridden path set in args, as with pmbootstrap --aports). - - :param name_repo: short alias used for the repository name, from - pmb.config.git_repos (e.g. "aports_upstream", - "pmaports") """ - # Check for repo name in the config - if name_repo not in pmb.config.git_repos: - raise ValueError("No git repository configured for " + name_repo) - - path = get_path(args, name_repo) - if not os.path.exists(path): - # Build git command - url = pmb.config.git_repos[name_repo] - command = ["git", "clone"] - command += [url, path] - - # Create parent dir and clone - logging.info("Clone git repository: " + url) - os.makedirs(args.work + "/cache_git", exist_ok=True) - pmb.helpers.run.user(args, command, output="stdout") - - # FETCH_HEAD does not exist after initial clone. Create it, so - # is_outdated() can use it. - fetch_head = path + "/.git/FETCH_HEAD" - if not os.path.exists(fetch_head): - open(fetch_head, "w").close() - - -def rev_parse(args, path, revision="HEAD", extra_args: list = []): - """ Run "git rev-parse" in a specific repository dir. - - :param path: to the git repository - :param extra_args: additional arguments for "git rev-parse". Pass - "--abbrev-ref" to get the branch instead of the - commit, if possible. - :returns: commit string like "90cd0ad84d390897efdcf881c0315747a4f3a966" - or (with --abbrev-ref): the branch name, e.g. 
"master" """ - command = ["git", "rev-parse"] + extra_args + [revision] - rev = pmb.helpers.run.user(args, command, path, output_return=True) - return rev.rstrip() - - -def can_fast_forward(args, path, branch_upstream, branch="HEAD"): - command = ["git", "merge-base", "--is-ancestor", branch, branch_upstream] - ret = pmb.helpers.run.user(args, command, path, check=False) - if ret == 0: - return True - elif ret == 1: - return False - else: - raise RuntimeError("Unexpected exit code from git: " + str(ret)) - - -def clean_worktree(args, path): - """ Check if there are not any modified files in the git dir. """ - command = ["git", "status", "--porcelain"] - return pmb.helpers.run.user(args, command, path, output_return=True) == "" - - -def get_upstream_remote(args, name_repo): - """ Find the remote, which matches the git URL from the config. Usually - "origin", but the user may have set up their git repository - differently. """ - url = pmb.config.git_repos[name_repo] - path = get_path(args, name_repo) - command = ["git", "remote", "-v"] - output = pmb.helpers.run.user(args, command, path, output_return=True) - for line in output.split("\n"): - if url in line: - return line.split("\t", 1)[0] - raise RuntimeError("{}: could not find remote name for URL '{}' in git" - " repository: {}".format(name_repo, url, path)) - - -def parse_channels_cfg(args): - """ Parse channels.cfg from pmaports.git, origin/master branch. - Reference: https://postmarketos.org/channels.cfg - :returns: dict like: {"meta": {"recommended": "edge"}, - "channels": {"edge": {"description": ..., - "branch_pmaports": ..., - "branch_aports": ..., - "mirrordir_alpine": ...}, - ...}} """ - # Cache during one pmbootstrap run - cache_key = "pmb.helpers.git.parse_channels_cfg" - if pmb.helpers.other.cache[cache_key]: - return pmb.helpers.other.cache[cache_key] - - # Read with configparser - cfg = configparser.ConfigParser() - if args.config_channels: - cfg.read([args.config_channels]) - else: - remote = get_upstream_remote(args, "pmaports") - command = ["git", "show", f"{remote}/master:channels.cfg"] - stdout = pmb.helpers.run.user(args, command, args.aports, - output_return=True, check=False) - try: - cfg.read_string(stdout) - except configparser.MissingSectionHeaderError: - logging.info("NOTE: fix this by fetching your pmaports.git, e.g." - " with 'pmbootstrap pull'") - raise RuntimeError("Failed to read channels.cfg from" - f" '{remote}/master' branch of your local" - " pmaports clone") - - # Meta section - ret = {"channels": {}} - ret["meta"] = {"recommended": cfg.get("channels.cfg", "recommended")} - - # Channels - for channel in cfg.sections(): - if channel == "channels.cfg": - continue # meta section - - channel_new = pmb.helpers.pmaports.get_channel_new(channel) - - ret["channels"][channel_new] = {} - for key in ["description", "branch_pmaports", "branch_aports", - "mirrordir_alpine"]: - value = cfg.get(channel, key) - ret["channels"][channel_new][key] = value - - pmb.helpers.other.cache[cache_key] = ret - return ret - - -def get_branches_official(args, name_repo): - """ Get all branches that point to official release channels. - :returns: list of supported branches, e.g. ["master", "3.11"] """ - # This functions gets called with pmaports and aports_upstream, because - # both are displayed in "pmbootstrap status". But it only makes sense - # to display pmaports there, related code will be refactored soon (#1903). 
- if name_repo != "pmaports": - return ["master"] - - channels_cfg = parse_channels_cfg(args) - ret = [] - for channel, channel_data in channels_cfg["channels"].items(): - ret.append(channel_data["branch_pmaports"]) - return ret - - -def pull(args, name_repo): - """ Check if on official branch and essentially try 'git pull --ff-only'. - Instead of really doing 'git pull --ff-only', do it in multiple steps - (fetch, merge --ff-only), so we can display useful messages depending - on which part fails. - - :returns: integer, >= 0 on success, < 0 on error """ - branches_official = get_branches_official(args, name_repo) - - # Skip if repo wasn't cloned - path = get_path(args, name_repo) - if not os.path.exists(path): - logging.debug(name_repo + ": repo was not cloned, skipping pull!") - return 1 - - # Skip if not on official branch - branch = rev_parse(args, path, extra_args=["--abbrev-ref"]) - msg_start = "{} (branch: {}):".format(name_repo, branch) - if branch not in branches_official: - logging.warning("{} not on one of the official branches ({}), skipping" - " pull!" - "".format(msg_start, ", ".join(branches_official))) - return -1 - - # Skip if workdir is not clean - if not clean_worktree(args, path): - logging.warning(msg_start + " workdir is not clean, skipping pull!") - return -2 - - # Skip if branch is tracking different remote - branch_upstream = get_upstream_remote(args, name_repo) + "/" + branch - remote_ref = rev_parse(args, path, branch + "@{u}", ["--abbrev-ref"]) - if remote_ref != branch_upstream: - logging.warning("{} is tracking unexpected remote branch '{}' instead" - " of '{}'".format(msg_start, remote_ref, - branch_upstream)) - return -3 - - # Fetch (exception on failure, meaning connection to server broke) - logging.info(msg_start + " git pull --ff-only") - if not args.offline: - pmb.helpers.run.user(args, ["git", "fetch"], path) - - # Skip if already up to date - if rev_parse(args, path, branch) == rev_parse(args, path, branch_upstream): - logging.info(msg_start + " already up to date") - return 2 - - # Skip if we can't fast-forward - if not can_fast_forward(args, path, branch_upstream): - logging.warning("{} can't fast-forward to {}, looks like you changed" - " the git history of your local branch. Skipping pull!" - "".format(msg_start, branch_upstream)) - return -4 - - # Fast-forward now (should not fail due to checks above, so it's fine to - # throw an exception on error) - command = ["git", "merge", "--ff-only", branch_upstream] - pmb.helpers.run.user(args, command, path, "stdout") - return 0 - - -def is_outdated(path): - # FETCH_HEAD always exists in repositories cloned by pmbootstrap. - # Usually it does not (before first git fetch/pull), but there is no good - # fallback. For exampe, getting the _creation_ date of .git/HEAD is non- - # trivial with python on linux (https://stackoverflow.com/a/39501288). - # Note that we have to assume here that the user had fetched the "origin" - # repository. If the user fetched another repository, FETCH_HEAD would also - # get updated, even though "origin" may be outdated. For pmbootstrap status - # it is good enough, because it should help the users that are not doing - # much with pmaports.git to know when it is outdated. People who manually - # fetch other repos should usually know that and how to handle that - # situation. 
- path_head = path + "/.git/FETCH_HEAD" - date_head = os.path.getmtime(path_head) - - date_outdated = time.time() - pmb.config.git_repo_outdated - return date_head <= date_outdated - - -def get_topdir(args, path): - """ :returns: a string with the top dir of the git repository, or an - empty string if it's not a git repository. """ - return pmb.helpers.run.user(args, ["git", "rev-parse", "--show-toplevel"], - path, output_return=True, check=False).rstrip() - - -def get_files(args, path): - """ Get all files inside a git repository, that are either already in the - git tree or are not in gitignore. Do not list deleted files. To be used - for creating a tarball of the git repository. - :param path: top dir of the git repository - :returns: all files in a git repository as list, relative to path """ - ret = [] - files = pmb.helpers.run.user(args, ["git", "ls-files"], path, - output_return=True).split("\n") - files += pmb.helpers.run.user(args, ["git", "ls-files", - "--exclude-standard", "--other"], path, - output_return=True).split("\n") - for file in files: - if os.path.exists(f"{path}/{file}"): - ret += [file] - - return ret diff --git a/pmb/helpers/http.py b/pmb/helpers/http.py deleted file mode 100644 index fe0f7c58..00000000 --- a/pmb/helpers/http.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import hashlib -import json -import logging -import os -import shutil -import urllib.request - -import pmb.helpers.run - - -def download(args, url, prefix, cache=True, loglevel=logging.INFO, - allow_404=False): - """ Download a file to disk. - - :param url: the http(s) address of to the file to download - :param prefix: for the cache, to make it easier to find (cache files - get a hash of the URL after the prefix) - :param cache: if True, and url is cached, do not download it again - :param loglevel: change to logging.DEBUG to only display the download - message in 'pmbootstrap log', not in stdout. We use - this when downloading many APKINDEX files at once, no - point in showing a dozen messages. - :param allow_404: do not raise an exception when the server responds - with a 404 Not Found error. Only display a warning on - stdout (no matter if loglevel is changed). - :returns: path to the downloaded file in the cache or None on 404 """ - # Create cache folder - if not os.path.exists(args.work + "/cache_http"): - pmb.helpers.run.user(args, ["mkdir", "-p", args.work + "/cache_http"]) - - # Check if file exists in cache - prefix = prefix.replace("/", "_") - path = (args.work + "/cache_http/" + prefix + "_" + - hashlib.sha256(url.encode("utf-8")).hexdigest()) - if os.path.exists(path): - if cache: - return path - pmb.helpers.run.user(args, ["rm", path]) - - # Offline and not cached - if args.offline: - raise RuntimeError("File not found in cache and offline flag is" - f" enabled: {url}") - - # Download the file - logging.log(loglevel, "Download " + url) - try: - with urllib.request.urlopen(url) as response: - with open(path, "wb") as handle: - shutil.copyfileobj(response, handle) - # Handle 404 - except urllib.error.HTTPError as e: - if e.code == 404 and allow_404: - logging.warning("WARNING: file not found: " + url) - return None - raise - - # Return path in cache - return path - - -def retrieve(url, headers=None, allow_404=False): - """ Fetch the content of a URL and returns it as string. 
- - :param url: the http(s) address of to the resource to fetch - :param headers: dict of HTTP headers to use - :param allow_404: do not raise an exception when the server responds - with a 404 Not Found error. Only display a warning - :returns: str with the content of the response - """ - # Download the file - logging.verbose("Retrieving " + url) - - if headers is None: - headers = {} - - req = urllib.request.Request(url, headers=headers) - try: - with urllib.request.urlopen(req) as response: - return response.read() - # Handle 404 - except urllib.error.HTTPError as e: - if e.code == 404 and allow_404: - logging.warning("WARNING: failed to retrieve content from: " + url) - return None - raise - - -def retrieve_json(*args, **kwargs): - """ Fetch the contents of a URL, parse it as JSON and return it. See - retrieve() for the list of all parameters. """ - return json.loads(retrieve(*args, **kwargs)) diff --git a/pmb/helpers/lint.py b/pmb/helpers/lint.py deleted file mode 100644 index 3ba60bec..00000000 --- a/pmb/helpers/lint.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2023 Danct12 -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os - -import pmb.chroot -import pmb.chroot.apk -import pmb.build -import pmb.helpers.run -import pmb.helpers.pmaports - - -def check(args, pkgnames): - """ - Run apkbuild-lint on the supplied packages - - :param pkgnames: Names of the packages to lint - """ - pmb.chroot.apk.install(args, ["atools"]) - - # Mount pmaports.git inside the chroot so that we don't have to copy the - # package folders - pmaports = "/mnt/pmaports" - pmb.build.mount_pmaports(args, pmaports) - - # Locate all APKBUILDs and make the paths be relative to the pmaports - # root - apkbuilds = [] - for pkgname in pkgnames: - aport = pmb.helpers.pmaports.find(args, pkgname) - if not os.path.exists(aport + "/APKBUILD"): - raise ValueError("Path does not contain an APKBUILD file:" + - aport) - relpath = os.path.relpath(aport, args.aports) - apkbuilds.append(f"{relpath}/APKBUILD") - - # Run apkbuild-lint in chroot from the pmaports mount point. This will - # print a nice source identifier à la "./cross/grub-x86/APKBUILD" for - # each violation. - pkgstr = ", ".join(pkgnames) - logging.info(f"(native) linting {pkgstr} with apkbuild-lint") - options = pmb.config.apkbuild_custom_valid_options - return pmb.chroot.root(args, ["apkbuild-lint"] + apkbuilds, - check=False, output="stdout", - output_return=True, - working_dir=pmaports, - env={"CUSTOM_VALID_OPTIONS": " ".join(options)}) diff --git a/pmb/helpers/logging.py b/pmb/helpers/logging.py deleted file mode 100644 index fc9e3d59..00000000 --- a/pmb/helpers/logging.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import sys -import pmb.config - -logfd = None - - -class log_handler(logging.StreamHandler): - """ - Write to stdout and to the already opened log file. 
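The cache lookup in download() further above boils down to a deterministic file name per URL. A small sketch of that naming scheme, where work stands in for the pmbootstrap work folder and the URL is a made-up example:

import hashlib

def http_cache_path_sketch(work, url, prefix):
    # Cache naming used by download() above: one file per URL under
    # <work>/cache_http/, keyed by the sha256 of the URL.
    prefix = prefix.replace("/", "_")
    digest = hashlib.sha256(url.encode("utf-8")).hexdigest()
    return f"{work}/cache_http/{prefix}_{digest}"

print(http_cache_path_sketch("/tmp/pmb-work",
                             "https://example.org/APKINDEX.tar.gz",
                             "APKINDEX"))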
- """ - _args = None - - def emit(self, record): - try: - msg = self.format(record) - - # INFO or higher: Write to stdout - if (not self._args.details_to_stdout and - not self._args.quiet and - record.levelno >= logging.INFO): - stream = self.stream - - styles = pmb.config.styles - - msg_col = ( - msg.replace( - "NOTE:", - f"{styles['BLUE']}NOTE:{styles['END']}", - 1, - ) - .replace( - "WARNING:", - f"{styles['YELLOW']}WARNING:{styles['END']}", - 1, - ) - .replace( - "ERROR:", - f"{styles['RED']}ERROR:{styles['END']}", - 1, - ) - .replace( - "DONE!", - f"{styles['GREEN']}DONE!{styles['END']}", - 1, - ) - ) - - stream.write(msg_col) - stream.write(self.terminator) - self.flush() - - # Everything: Write to logfd - msg = "(" + str(os.getpid()).zfill(6) + ") " + msg - logfd.write(msg + "\n") - logfd.flush() - - except (KeyboardInterrupt, SystemExit): - raise - except BaseException: - self.handleError(record) - - -def add_verbose_log_level(): - """ - Add a new log level "verbose", which is below "debug". Also monkeypatch - logging, so it can be used with logging.verbose(). - - This function is based on work by Voitek Zylinski and sleepycal: - https://stackoverflow.com/a/20602183 - All stackoverflow user contributions are licensed as CC-BY-SA: - https://creativecommons.org/licenses/by-sa/3.0/ - """ - logging.VERBOSE = 5 - logging.addLevelName(logging.VERBOSE, "VERBOSE") - logging.Logger.verbose = lambda inst, msg, * \ - args, **kwargs: inst.log(logging.VERBOSE, msg, *args, **kwargs) - logging.verbose = lambda msg, *args, **kwargs: logging.log(logging.VERBOSE, - msg, *args, - **kwargs) - - -def init(args): - """ - Set log format and add the log file descriptor to logfd, add the - verbose log level. - """ - global logfd - # Set log file descriptor (logfd) - if args.details_to_stdout: - logfd = sys.stdout - else: - # Require containing directory to exist (so we don't create the work - # folder and break the folder migration logic, which needs to set the - # version upon creation) - dir = os.path.dirname(args.log) - if os.path.exists(dir): - logfd = open(args.log, "a+") - else: - logfd = open(os.devnull, "a+") - if args.action != "init": - print(f"WARNING: Can't create log file in '{dir}', path" - " does not exist!") - - # Set log format - root_logger = logging.getLogger() - root_logger.handlers = [] - formatter = logging.Formatter("[%(asctime)s] %(message)s", - datefmt="%H:%M:%S") - - # Set log level - add_verbose_log_level() - root_logger.setLevel(logging.DEBUG) - if args.verbose: - root_logger.setLevel(logging.VERBOSE) - - # Add a custom log handler - handler = log_handler() - log_handler._args = args - handler.setFormatter(formatter) - root_logger.addHandler(handler) - - -def disable(): - logger = logging.getLogger() - logger.disabled = True diff --git a/pmb/helpers/mount.py b/pmb/helpers/mount.py deleted file mode 100644 index a3203b53..00000000 --- a/pmb/helpers/mount.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import pmb.helpers.run - - -def ismount(folder): - """ - Ismount() implementation that works for mount --bind. 
- Workaround for: https://bugs.python.org/issue29707 - """ - folder = os.path.realpath(os.path.realpath(folder)) - with open("/proc/mounts", "r") as handle: - for line in handle: - words = line.split() - if len(words) >= 2 and words[1] == folder: - return True - if words[0] == folder: - return True - return False - - -def bind(args, source, destination, create_folders=True, umount=False): - """ - Mount --bind a folder and create necessary directory structure. - :param umount: when destination is already a mount point, umount it first. - """ - # Check/umount destination - if ismount(destination): - if umount: - umount_all(args, destination) - else: - return - - # Check/create folders - for path in [source, destination]: - if os.path.exists(path): - continue - if create_folders: - pmb.helpers.run.root(args, ["mkdir", "-p", path]) - else: - raise RuntimeError("Mount failed, folder does not exist: " + - path) - - # Actually mount the folder - pmb.helpers.run.root(args, ["mount", "--bind", source, destination]) - - # Verify that it has worked - if not ismount(destination): - raise RuntimeError("Mount failed: " + source + " -> " + destination) - - -def bind_file(args, source, destination, create_folders=False): - """ - Mount a file with the --bind option, and create the destination file, - if necessary. - """ - # Skip existing mountpoint - if ismount(destination): - return - - # Create empty file - if not os.path.exists(destination): - if create_folders: - dir = os.path.dirname(destination) - if not os.path.isdir(dir): - pmb.helpers.run.root(args, ["mkdir", "-p", dir]) - - pmb.helpers.run.root(args, ["touch", destination]) - - # Mount - pmb.helpers.run.root(args, ["mount", "--bind", source, - destination]) - - -def umount_all_list(prefix, source="/proc/mounts"): - """ - Parses `/proc/mounts` for all folders beginning with a prefix. - :source: can be changed for testcases - :returns: a list of folders that need to be umounted - """ - ret = [] - prefix = os.path.realpath(prefix) - with open(source, "r") as handle: - for line in handle: - words = line.split() - if len(words) < 2: - raise RuntimeError("Failed to parse line in " + source + ": " + - line) - mountpoint = words[1] - if mountpoint.startswith(prefix): - # Remove "\040(deleted)" suffix (#545) - deleted_str = r"\040(deleted)" - if mountpoint.endswith(deleted_str): - mountpoint = mountpoint[:-len(deleted_str)] - ret.append(mountpoint) - ret.sort(reverse=True) - return ret - - -def umount_all(args, folder): - """ - Umount all folders that are mounted inside a given folder. - """ - for mountpoint in umount_all_list(folder): - pmb.helpers.run.root(args, ["umount", mountpoint]) - if ismount(mountpoint): - raise RuntimeError("Failed to umount: " + mountpoint) diff --git a/pmb/helpers/other.py b/pmb/helpers/other.py deleted file mode 100644 index 91d3345c..00000000 --- a/pmb/helpers/other.py +++ /dev/null @@ -1,315 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import logging -import os -import re -import pmb.chroot -import pmb.config -import pmb.config.init -import pmb.helpers.pmaports -import pmb.helpers.run - - -def folder_size(args, path): - """ - Run `du` to calculate the size of a folder (this is less code and - faster than doing the same task in pure Python). This result is only - approximatelly right, but good enough for pmbootstrap's use case (#760). 
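The /proc/mounts parsing in umount_all_list() can be exercised against an in-memory sample. A sketch with a made-up chroot prefix and mount table:

import os

def mounts_below_sketch(prefix, mounts_text):
    # Same parsing idea as umount_all_list() above, but over an in-memory
    # string instead of /proc/mounts. "prefix" and the sample are made up.
    prefix = os.path.realpath(prefix)
    ret = []
    for line in mounts_text.splitlines():
        words = line.split()
        if len(words) < 2:
            raise RuntimeError("Failed to parse line: " + line)
        mountpoint = words[1]
        if mountpoint.startswith(prefix):
            deleted_str = r"\040(deleted)"
            if mountpoint.endswith(deleted_str):
                mountpoint = mountpoint[:-len(deleted_str)]
            ret.append(mountpoint)
    ret.sort(reverse=True)
    return ret

sample = ("proc /mnt/chroot/proc proc rw 0 0\n"
          "dev /mnt/chroot/dev devtmpfs rw 0 0\n"
          "tmpfs /mnt/other tmpfs rw 0 0\n")
print(mounts_below_sketch("/mnt/chroot", sample))
# ['/mnt/chroot/proc', '/mnt/chroot/dev']

Reverse-sorting means nested mount points come before their parents, so umount_all() never tries to unmount a directory that still has mounts below it.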
- - :returns: folder size in kilobytes - """ - output = pmb.helpers.run.root(args, ["du", "-ks", - path], output_return=True) - - # Only look at last line to filter out sudo garbage (#1766) - last_line = output.split("\n")[-2] - - ret = int(last_line.split("\t")[0]) - return ret - - -def check_grsec(): - """ - Check if the current kernel is based on the grsec patchset, and if - the chroot_deny_chmod option is enabled. Raise an exception in that - case, with a link to the issue. Otherwise, do nothing. - """ - path = "/proc/sys/kernel/grsecurity/chroot_deny_chmod" - if not os.path.exists(path): - return - - raise RuntimeError("You're running a kernel based on the grsec" - " patchset. This is not supported.") - - -def check_binfmt_misc(args): - """ - Check if the 'binfmt_misc' module is loaded by checking, if - /proc/sys/fs/binfmt_misc/ exists. If it exists, then do nothing. - Otherwise, load the module and mount binfmt_misc. - If that fails as well, raise an exception pointing the user to the wiki. - """ - path = "/proc/sys/fs/binfmt_misc/status" - if os.path.exists(path): - return - - # check=False: this might be built-in instead of being a module - pmb.helpers.run.root(args, ["modprobe", "binfmt_misc"], check=False) - - # check=False: we check it below and print a more helpful message on error - pmb.helpers.run.root(args, ["mount", "-t", "binfmt_misc", "none", - "/proc/sys/fs/binfmt_misc"], check=False) - - if not os.path.exists(path): - link = "https://postmarketos.org/binfmt_misc" - raise RuntimeError(f"Failed to set up binfmt_misc, see: {link}") - - -def migrate_success(args, version): - logging.info("Migration to version " + str(version) + " done") - with open(args.work + "/version", "w") as handle: - handle.write(str(version) + "\n") - - -def migrate_work_folder(args): - # Read current version - current = 0 - path = args.work + "/version" - if os.path.exists(path): - with open(path, "r") as f: - current = int(f.read().rstrip()) - - # Compare version, print warning or do nothing - required = pmb.config.work_version - if current == required: - return - logging.info("WARNING: Your work folder version needs to be migrated" - " (from version " + str(current) + " to " + str(required) + - ")!") - - # 0 => 1 - if current == 0: - # Ask for confirmation - logging.info("Changelog:") - logging.info("* Building chroots have a different username (#709)") - logging.info("Migration will do the following:") - logging.info("* Zap your chroots") - logging.info("* Adjust '" + args.work + "/config_abuild/abuild.conf'") - if not pmb.helpers.cli.confirm(args): - raise RuntimeError("Aborted.") - - # Zap and update abuild.conf - pmb.chroot.zap(args, False) - conf = args.work + "/config_abuild/abuild.conf" - if os.path.exists(conf): - pmb.helpers.run.root(args, ["sed", "-i", - "s./home/user/./home/pmos/.g", conf]) - # Update version file - migrate_success(args, 1) - current = 1 - - # 1 => 2 - if current == 1: - # Ask for confirmation - logging.info("Changelog:") - logging.info("* Fix: cache_distfiles was writable for everyone") - logging.info("Migration will do the following:") - logging.info("* Fix permissions of '" + args.work + - "/cache_distfiles'") - if not pmb.helpers.cli.confirm(args): - raise RuntimeError("Aborted.") - - # Fix permissions - dir = "/var/cache/distfiles" - for cmd in [["chown", "-R", "root:abuild", dir], - ["chmod", "-R", "664", dir], - ["chmod", "a+X", dir]]: - pmb.chroot.root(args, cmd) - migrate_success(args, 2) - current = 2 - - if current == 2: - # Ask for confirmation - 
logging.info("Changelog:") - logging.info("* Device chroots have a different user UID (#1576)") - logging.info("Migration will do the following:") - logging.info("* Zap your chroots") - if not pmb.helpers.cli.confirm(args): - raise RuntimeError("Aborted.") - - # Zap chroots - pmb.chroot.zap(args, False) - - # Update version file - migrate_success(args, 3) - current = 3 - - if current == 3: - # Ask for confirmation - path = args.work + "/cache_git" - logging.info("Changelog:") - logging.info("* pmbootstrap clones repositories with host system's") - logging.info(" 'git' instead of using it from an Alpine chroot") - logging.info("Migration will do the following:") - logging.info("* Check if 'git' is installed") - logging.info("* Change ownership to your user: " + path) - if not pmb.helpers.cli.confirm(args): - raise RuntimeError("Aborted.") - - # Require git, set cache_git ownership - pmb.config.init.require_programs() - if os.path.exists(path): - uid_gid = "{}:{}".format(os.getuid(), os.getgid()) - pmb.helpers.run.root(args, ["chown", "-R", uid_gid, path]) - else: - os.makedirs(path, 0o700, True) - - # Update version file - migrate_success(args, 4) - current = 4 - - if current == 4: - # Ask for confirmation - logging.info("Changelog:") - logging.info("* packages built by pmbootstrap are in a channel subdir") - logging.info("Migration will do the following:") - logging.info("* Move existing packages to edge subdir (if any)") - logging.info("* Zap your chroots") - if not pmb.helpers.cli.confirm(args): - raise RuntimeError("Aborted.") - - # Zap chroots - pmb.chroot.zap(args, False) - - # Move packages to edge subdir - edge_path = f"{args.work}/packages/edge" - pmb.helpers.run.root(args, ["mkdir", "-p", edge_path]) - for arch in pmb.config.build_device_architectures: - old_path = f"{args.work}/packages/{arch}" - new_path = f"{edge_path}/{arch}" - if os.path.exists(old_path): - if os.path.exists(new_path): - raise RuntimeError(f"Won't move '{old_path}' to" - f" '{new_path}', destination already" - " exists! Consider 'pmbootstrap zap -p'" - f" to delete '{args.work}/packages'.") - pmb.helpers.run.root(args, ["mv", old_path, new_path]) - pmb.helpers.run.root(args, ["chown", pmb.config.chroot_uid_user, - edge_path]) - - # Update version file - migrate_success(args, 5) - current = 5 - - if current == 5: - # Ask for confirmation - logging.info("Changelog:") - logging.info("* besides edge, pmaports channels have the same name") - logging.info(" as the branch now (pmbootstrap#2015)") - logging.info("Migration will do the following:") - logging.info("* Zap your chroots") - logging.info("* Adjust subdirs of your locally built packages dir:") - logging.info(f" {args.work}/packages") - logging.info(" stable => v20.05") - logging.info(" stable-next => v21.03") - if not pmb.helpers.cli.confirm(args): - raise RuntimeError("Aborted.") - - # Zap chroots to avoid potential "ERROR: Chroot 'native' was created - # for the 'stable' channel, but you are on the 'v20.05' channel now." - pmb.chroot.zap(args, False) - - # Migrate - packages_dir = f"{args.work}/packages" - for old, new in pmb.config.pmaports_channels_legacy.items(): - if os.path.exists(f"{packages_dir}/{old}"): - pmb.helpers.run.root(args, ["mv", old, new], packages_dir) - - # Update version file - migrate_success(args, 6) - current = 6 - - # Can't migrate, user must delete it - if current != required: - raise RuntimeError("Sorry, we can't migrate that automatically. 
Please" - " run 'pmbootstrap shutdown', then delete your" - " current work folder manually ('sudo rm -rf " + - args.work + "') and start over with 'pmbootstrap" - " init'. All your binary packages and caches will" - " be lost.") - - -def check_old_devices(args): - """ - Check if there are any device ports in device/*/APKBUILD, - rather than device/*/*/APKBUILD (e.g. device/testing/...). - """ - - g = glob.glob(args.aports + "/device/*/APKBUILD") - if not g: - return - - raise RuntimeError("Found device ports outside device/testing/... " - "Please run 'pmbootstrap pull' and/or move the " - "following device ports to device/testing:\n - " + - '\n - '.join(g)) - - -def validate_hostname(hostname): - """ - Check whether the string is a valid hostname, according to - - """ - # Check length - if len(hostname) > 63: - logging.fatal("ERROR: Hostname '" + hostname + "' is too long.") - return False - - # Check that it only contains valid chars - if not re.match(r"^[0-9a-z-\.]*$", hostname): - logging.fatal("ERROR: Hostname must only contain letters (a-z)," - " digits (0-9), minus signs (-), or periods (.)") - return False - - # Check that doesn't begin or end with a minus sign or period - if re.search(r"^-|^\.|-$|\.$", hostname): - logging.fatal("ERROR: Hostname must not begin or end with a minus" - " sign or period") - return False - - return True - - -""" -pmbootstrap uses this dictionary to save the result of expensive -results, so they work a lot faster the next time they are needed in the -same session. Usually the cache is written to and read from in the same -Python file, with code similar to the following: - -def lookup(key): - if key in pmb.helpers.other.cache["mycache"]: - return pmb.helpers.other.cache["mycache"][key] - ret = expensive_operation(args, key) - pmb.helpers.other.cache["mycache"][key] = ret - return ret -""" -cache = None - - -def init_cache(): - global cache - """ Add a caching dict (caches parsing of files etc. for the current - session) """ - repo_update = {"404": [], "offline_msg_shown": False} - cache = {"apkindex": {}, - "apkbuild": {}, - "apk_min_version_checked": [], - "apk_repository_list_updated": [], - "built": {}, - "find_aport": {}, - "pmb.helpers.package.depends_recurse": {}, - "pmb.helpers.package.get": {}, - "pmb.helpers.repo.update": repo_update, - "pmb.helpers.git.parse_channels_cfg": {}, - "pmb.config.pmaports.read_config": None} diff --git a/pmb/helpers/package.py b/pmb/helpers/package.py deleted file mode 100644 index f3541086..00000000 --- a/pmb/helpers/package.py +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" -Functions that work with both pmaports and binary package repos. See also: -- pmb/helpers/pmaports.py (work with pmaports) -- pmb/helpers/repo.py (work with binary package repos) -""" -import copy -import logging - -import pmb.helpers.pmaports -import pmb.helpers.repo - - -def remove_operators(package): - for operator in [">", ">=", "<=", "=", "<", "~"]: - if operator in package: - package = package.split(operator)[0] - break - return package - - -def get(args, pkgname, arch, replace_subpkgnames=False, must_exist=True): - """ Find a package in pmaports, and as fallback in the APKINDEXes of the - binary packages. - :param pkgname: package name (e.g. "hello-world") - :param arch: preferred architecture of the binary package. When it - can't be found for this arch, we'll still look for another - arch to see whether the package exists at all. 
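remove_operators() above simply cuts a dependency expression at the first version operator. A tiny sketch of the same splitting, with example dependency strings:

def remove_operators_sketch(package):
    # Same splitting as remove_operators() above: cut a dependency
    # expression at the first version operator that occurs in it.
    for operator in [">", ">=", "<=", "=", "<", "~"]:
        if operator in package:
            return package.split(operator)[0]
    return package

for dep in ["hello-world>=1.2-r0", "so:libcurl.so.4", "busybox"]:
    print(dep, "->", remove_operators_sketch(dep))
# hello-world>=1.2-r0 -> hello-world
# so:libcurl.so.4 -> so:libcurl.so.4
# busybox -> busybox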
So make - sure to check the returned arch against what you wanted - with check_arch(). Example: "armhf" - :param replace_subpkgnames: replace all subpkgnames with their main - pkgnames in the depends (see #1733) - :param must_exist: raise an exception, if not found - :returns: * data from the parsed APKBUILD or APKINDEX in the following - format: {"arch": ["noarch"], - "depends": ["busybox-extras", "lddtree", ...], - "pkgname": "postmarketos-mkinitfs", - "provides": ["mkinitfs=0..1"], - "version": "0.0.4-r10"} - * None if the package was not found """ - # Cached result - cache_key = "pmb.helpers.package.get" - if ( - arch in pmb.helpers.other.cache[cache_key] and - pkgname in pmb.helpers.other.cache[cache_key][arch] and - replace_subpkgnames in pmb.helpers.other.cache[cache_key][arch][ - pkgname - ] - ): - return pmb.helpers.other.cache[cache_key][arch][pkgname][ - replace_subpkgnames - ] - - # Find in pmaports - ret = None - pmaport = pmb.helpers.pmaports.get(args, pkgname, False) - if pmaport: - ret = {"arch": pmaport["arch"], - "depends": pmb.build._package.get_depends(args, pmaport), - "pkgname": pmaport["pkgname"], - "provides": pmaport["provides"], - "version": pmaport["pkgver"] + "-r" + pmaport["pkgrel"]} - - # Find in APKINDEX (given arch) - if not ret or not pmb.helpers.pmaports.check_arches(ret["arch"], arch): - pmb.helpers.repo.update(args, arch) - ret_repo = pmb.parse.apkindex.package(args, pkgname, arch, False) - - # Save as result if there was no pmaport, or if the pmaport can not be - # built for the given arch, but there is a binary package for that arch - # (e.g. temp/mesa can't be built for x86_64, but Alpine has it) - if not ret or (ret_repo and ret_repo["arch"] == arch): - ret = ret_repo - - # Find in APKINDEX (other arches) - if not ret: - pmb.helpers.repo.update(args) - for arch_i in pmb.config.build_device_architectures: - if arch_i != arch: - ret = pmb.parse.apkindex.package(args, pkgname, arch_i, False) - if ret: - break - - # Copy ret (it might have references to caches of the APKINDEX or APKBUILDs - # and we don't want to modify those!) - if ret: - ret = copy.deepcopy(ret) - - # Make sure ret["arch"] is a list (APKINDEX code puts a string there) - if ret and isinstance(ret["arch"], str): - ret["arch"] = [ret["arch"]] - - # Replace subpkgnames if desired - if replace_subpkgnames: - depends_new = [] - for depend in ret["depends"]: - depend_data = get(args, depend, arch, must_exist=False) - if not depend_data: - logging.warning(f"WARNING: {pkgname}: failed to resolve" - f" dependency '{depend}'") - # Can't replace potential subpkgname - if depend not in depends_new: - depends_new += [depend] - continue - depend_pkgname = depend_data["pkgname"] - if depend_pkgname not in depends_new: - depends_new += [depend_pkgname] - ret["depends"] = depends_new - - # Save to cache and return - if ret: - if arch not in pmb.helpers.other.cache[cache_key]: - pmb.helpers.other.cache[cache_key][arch] = {} - if pkgname not in pmb.helpers.other.cache[cache_key][arch]: - pmb.helpers.other.cache[cache_key][arch][pkgname] = {} - pmb.helpers.other.cache[cache_key][arch][pkgname][ - replace_subpkgnames - ] = ret - return ret - - # Could not find the package - if not must_exist: - return None - raise RuntimeError("Package '" + pkgname + "': Could not find aport, and" - " could not find this package in any APKINDEX!") - - -def depends_recurse(args, pkgname, arch): - """ Recursively resolve all of the package's dependencies. - :param pkgname: name of the package (e.g. 
"device-samsung-i9100") - :param arch: preferred architecture for binary packages - :returns: a list of pkgname_start and all its dependencies, e.g: - ["busybox-static-armhf", "device-samsung-i9100", - "linux-samsung-i9100", ...] """ - # Cached result - cache_key = "pmb.helpers.package.depends_recurse" - if (arch in pmb.helpers.other.cache[cache_key] and - pkgname in pmb.helpers.other.cache[cache_key][arch]): - return pmb.helpers.other.cache[cache_key][arch][pkgname] - - # Build ret (by iterating over the queue) - queue = [pkgname] - ret = [] - while len(queue): - pkgname_queue = queue.pop() - package = get(args, pkgname_queue, arch) - - # Add its depends to the queue - for depend in package["depends"]: - if depend not in ret: - queue += [depend] - - # Add the pkgname (not possible subpkgname) to ret - if package["pkgname"] not in ret: - ret += [package["pkgname"]] - ret.sort() - - # Save to cache and return - if arch not in pmb.helpers.other.cache[cache_key]: - pmb.helpers.other.cache[cache_key][arch] = {} - pmb.helpers.other.cache[cache_key][arch][pkgname] = ret - return ret - - -def check_arch(args, pkgname, arch, binary=True): - """ Can a package be built for a certain architecture, or is there a binary - package for it? - - :param pkgname: name of the package - :param arch: architecture to check against - :param binary: set to False to only look at the pmaports, not at binary - packages - :returns: True when the package can be built, or there is a binary - package, False otherwise - """ - if binary: - arches = get(args, pkgname, arch)["arch"] - else: - arches = pmb.helpers.pmaports.get(args, pkgname)["arch"] - return pmb.helpers.pmaports.check_arches(arches, arch) diff --git a/pmb/helpers/pkgrel_bump.py b/pmb/helpers/pkgrel_bump.py deleted file mode 100644 index 982cc2fc..00000000 --- a/pmb/helpers/pkgrel_bump.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging - -import pmb.helpers.file -import pmb.helpers.pmaports -import pmb.helpers.repo -import pmb.parse - - -def package(args, pkgname, reason="", dry=False): - """ - Increase the pkgrel in the APKBUILD of a specific package. - - :param pkgname: name of the package - :param reason: string to display as reason why it was increased - :param dry: don't modify the APKBUILD, just print the message - """ - # Current and new pkgrel - path = pmb.helpers.pmaports.find(args, pkgname) + "/APKBUILD" - apkbuild = pmb.parse.apkbuild(path) - pkgrel = int(apkbuild["pkgrel"]) - pkgrel_new = pkgrel + 1 - - # Display the message, bail out in dry mode - logging.info("Increase '" + pkgname + "' pkgrel (" + str(pkgrel) + " -> " + - str(pkgrel_new) + ")" + reason) - if dry: - return - - # Increase - old = "\npkgrel=" + str(pkgrel) + "\n" - new = "\npkgrel=" + str(pkgrel_new) + "\n" - pmb.helpers.file.replace(path, old, new) - - # Verify - del pmb.helpers.other.cache["apkbuild"][path] - apkbuild = pmb.parse.apkbuild(path) - if int(apkbuild["pkgrel"]) != pkgrel_new: - raise RuntimeError("Failed to bump pkgrel for package '" + pkgname + - "'. Make sure that there's a line with exactly the" - " string '" + old + "' and nothing else in: " + - path) - - -def auto_apkindex_package(args, arch, aport, apk, dry=False): - """ - Bump the pkgrel of a specific package if it is outdated in the given - APKINDEX. - - :param arch: the architecture, e.g. 
"armhf" - :param aport: parsed APKBUILD of the binary package's origin: - {"pkgname": ..., "pkgver": ..., "pkgrel": ..., ...} - :param apk: information about the binary package from the APKINDEX: - {"version": ..., "depends": [...], ...} - :param dry: don't modify the APKBUILD, just print the message - :returns: True when there was an APKBUILD that needed to be changed. - """ - version_aport = aport["pkgver"] + "-r" + aport["pkgrel"] - version_apk = apk["version"] - pkgname = aport["pkgname"] - - # Skip when aport version != binary package version - compare = pmb.parse.version.compare(version_aport, version_apk) - if compare == -1: - logging.warning("{}: skipping, because the aport version {} is lower" - " than the binary version {}".format(pkgname, - version_aport, - version_apk)) - return - if compare == 1: - logging.verbose("{}: skipping, because the aport version {} is higher" - " than the binary version {}".format(pkgname, - version_aport, - version_apk)) - return - - # Find missing depends - depends = apk["depends"] - logging.verbose("{}: checking depends: {}".format(pkgname, - ", ".join(depends))) - missing = [] - for depend in depends: - if depend.startswith("!"): - # Ignore conflict-dependencies - continue - - providers = pmb.parse.apkindex.providers(args, depend, arch, - must_exist=False) - if providers == {}: - # We're only interested in missing depends starting with "so:" - # (which means dynamic libraries that the package was linked - # against) and packages for which no aport exists. - if (depend.startswith("so:") or - not pmb.helpers.pmaports.find(args, depend, False)): - missing.append(depend) - - # Increase pkgrel - if len(missing): - package(args, pkgname, reason=", missing depend(s): " + - ", ".join(missing), dry=dry) - return True - - -def auto(args, dry=False): - """ - :returns: list of aport names, where the pkgrel needed to be changed - """ - ret = [] - for arch in pmb.config.build_device_architectures: - paths = pmb.helpers.repo.apkindex_files(args, arch, alpine=False) - for path in paths: - logging.info("scan " + path) - index = pmb.parse.apkindex.parse(path, False) - for pkgname, apk in index.items(): - origin = apk["origin"] - # Only increase once! - if origin in ret: - logging.verbose( - f"{pkgname}: origin '{origin}' found again") - continue - aport_path = pmb.helpers.pmaports.find(args, origin, False) - if not aport_path: - logging.warning("{}: origin '{}' aport not found".format( - pkgname, origin)) - continue - aport = pmb.parse.apkbuild(f"{aport_path}/APKBUILD") - if auto_apkindex_package(args, arch, aport, apk, dry): - ret.append(pkgname) - return ret diff --git a/pmb/helpers/pmaports.py b/pmb/helpers/pmaports.py deleted file mode 100644 index 6d5535bf..00000000 --- a/pmb/helpers/pmaports.py +++ /dev/null @@ -1,294 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" -Functions that work with pmaports. 
See also: -- pmb/helpers/repo.py (work with binary package repos) -- pmb/helpers/package.py (work with both) -""" -import glob -import logging -import os - -import pmb.parse - - -def _find_apkbuilds(args): - # Try to get a cached result first (we assume that the aports don't change - # in one pmbootstrap call) - apkbuilds = pmb.helpers.other.cache.get("pmb.helpers.pmaports.apkbuilds") - if apkbuilds is not None: - return apkbuilds - - apkbuilds = {} - for apkbuild in glob.iglob(f"{args.aports}/**/*/APKBUILD", recursive=True): - package = os.path.basename(os.path.dirname(apkbuild)) - if package in apkbuilds: - raise RuntimeError(f"Package {package} found in multiple aports " - "subfolders. Please put it only in one folder.") - apkbuilds[package] = apkbuild - - # Sort dictionary so we don't need to do it over and over again in - # get_list() - apkbuilds = dict(sorted(apkbuilds.items())) - - # Save result in cache - pmb.helpers.other.cache["pmb.helpers.pmaports.apkbuilds"] = apkbuilds - return apkbuilds - - -def get_list(args): - """ :returns: list of all pmaport pkgnames (["hello-world", ...]) """ - return list(_find_apkbuilds(args).keys()) - - -def guess_main_dev(args, subpkgname): - """ - Check if a package without "-dev" at the end exists in pmaports or not, and - log the appropriate message. Don't call this function directly, use - guess_main() instead. - - :param subpkgname: subpackage name, must end in "-dev" - :returns: full path to the pmaport or None - """ - pkgname = subpkgname[:-4] - path = _find_apkbuilds(args).get(pkgname) - if path: - logging.verbose(subpkgname + ": guessed to be a subpackage of " + - pkgname + " (just removed '-dev')") - return os.path.dirname(path) - - logging.verbose(subpkgname + ": guessed to be a subpackage of " + pkgname + - ", which we can't find in pmaports, so it's probably in" - " Alpine") - return None - - -def guess_main(args, subpkgname): - """ - Find the main package by assuming it is a prefix of the subpkgname. - We do that, because in some APKBUILDs the subpkgname="" variable gets - filled with a shell loop and the APKBUILD parser in pmbootstrap can't - parse this right. (Intentionally, we don't want to implement a full shell - parser.) - - :param subpkgname: subpackage name (e.g. "u-boot-some-device") - :returns: * full path to the aport, e.g.: - "/home/user/code/pmbootstrap/aports/main/u-boot" - * None when we couldn't find a main package - """ - # Packages ending in -dev: just assume that the originating aport has the - # same pkgname, except for the -dev at the end. If we use the other method - # below on subpackages, we may end up with the wrong package. For example, - # if something depends on plasma-framework-dev, and plasma-framework is in - # Alpine, but plasma is in pmaports, then the cutting algorithm below would - # pick plasma instead of plasma-framework. - if subpkgname.endswith("-dev"): - return guess_main_dev(args, subpkgname) - - # Iterate until the cut up subpkgname is gone - words = subpkgname.split("-") - while len(words) > 1: - # Remove one dash-separated word at a time ("a-b-c" -> "a-b") - words.pop() - pkgname = "-".join(words) - - # Look in pmaports - path = _find_apkbuilds(args).get(pkgname) - if path: - logging.verbose(subpkgname + ": guessed to be a subpackage of " + - pkgname) - return os.path.dirname(path) - - -def _find_package_in_apkbuild(package, path): - """ - Look through subpackages and all provides to see if the APKBUILD at the - specified path contains (or provides) the specified package. 
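The word-cutting in guess_main() produces a fixed candidate order. A sketch that only computes that candidate list (the real function additionally looks each candidate up in pmaports and stops at the first hit):

def guess_main_candidates_sketch(subpkgname):
    # Candidate order tried by guess_main() above.
    if subpkgname.endswith("-dev"):
        return [subpkgname[:-4]]
    words = subpkgname.split("-")
    candidates = []
    while len(words) > 1:
        words.pop()
        candidates.append("-".join(words))
    return candidates

print(guess_main_candidates_sketch("u-boot-some-device"))
# ['u-boot-some', 'u-boot', 'u']
print(guess_main_candidates_sketch("plasma-framework-dev"))
# ['plasma-framework']  (the -dev case is special-cased to avoid over-cutting)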
- - :param package: The package to search for - :param path: The path to the apkbuild - :return: True if the APKBUILD contains or provides the package - """ - apkbuild = pmb.parse.apkbuild(path) - - # Subpackages - if package in apkbuild["subpackages"]: - return True - - # Search for provides in both package and subpackages - apkbuild_pkgs = [apkbuild, *apkbuild["subpackages"].values()] - for apkbuild_pkg in apkbuild_pkgs: - if not apkbuild_pkg: - continue - - # Provides (cut off before equals sign for entries like - # "mkbootimg=0.0.1") - for provides_i in apkbuild_pkg["provides"]: - # Ignore provides without version, they shall never be - # automatically selected - if "=" not in provides_i: - continue - - if package == provides_i.split("=", 1)[0]: - return True - - return False - - -def find(args, package, must_exist=True): - """ - Find the aport path that provides a certain subpackage. - If you want the parsed APKBUILD instead, use pmb.helpers.pmaports.get(). - - :param must_exist: Raise an exception, when not found - :returns: the full path to the aport folder - """ - # Try to get a cached result first (we assume that the aports don't change - # in one pmbootstrap call) - ret = None - if package in pmb.helpers.other.cache["find_aport"]: - ret = pmb.helpers.other.cache["find_aport"][package] - else: - # Sanity check - if "*" in package: - raise RuntimeError("Invalid pkgname: " + package) - - # Try to find an APKBUILD with the exact pkgname we are looking for - path = _find_apkbuilds(args).get(package) - if path: - ret = os.path.dirname(path) - else: - # No luck, take a guess what APKBUILD could have the package we are - # looking for as subpackage - guess = guess_main(args, package) - if guess: - # Parse the APKBUILD and verify if the guess was right - if _find_package_in_apkbuild(package, f'{guess}/APKBUILD'): - ret = guess - else: - # Otherwise parse all APKBUILDs (takes time!), is the - # package we are looking for a subpackage of any of those? - for path_current in _find_apkbuilds(args).values(): - if _find_package_in_apkbuild(package, path_current): - ret = os.path.dirname(path_current) - break - - # If we still didn't find anything, as last resort: assume our - # initial guess was right and the APKBUILD parser just didn't - # find the subpackage in there because it is behind shell logic - # that we don't parse. - if not ret: - ret = guess - - # Crash when necessary - if ret is None and must_exist: - raise RuntimeError("Could not find aport for package: " + - package) - - # Save result in cache - pmb.helpers.other.cache["find_aport"][package] = ret - return ret - - -def get(args, pkgname, must_exist=True, subpackages=True): - """ Find and parse an APKBUILD file. - Run 'pmbootstrap apkbuild_parse hello-world' for a full output example. - Relevant variables are defined in pmb.config.apkbuild_attributes. - - :param pkgname: the package name to find - :param must_exist: raise an exception when it can't be found - :param subpackages: also search for subpackages with the specified - names (slow! might need to parse all APKBUILDs to - find it) - :returns: relevant variables from the APKBUILD as dictionary, e.g.: - { "pkgname": "hello-world", - "arch": ["all"], - "pkgrel": "4", - "pkgrel": "1", - "options": [], - ... 
} - """ - pkgname = pmb.helpers.package.remove_operators(pkgname) - if subpackages: - aport = find(args, pkgname, must_exist) - if aport: - return pmb.parse.apkbuild(f"{aport}/APKBUILD") - else: - path = _find_apkbuilds(args).get(pkgname) - if path: - return pmb.parse.apkbuild(path) - if must_exist: - raise RuntimeError("Could not find APKBUILD for package:" - f" {pkgname}") - - return None - - -def find_providers(args, provide): - """ - Search for providers of the specified (virtual) package in pmaports. - Note: Currently only providers from a single APKBUILD are returned. - - :param provide: the (virtual) package to search providers for - :returns: tuple list (pkgname, apkbuild_pkg) with providers, sorted by - provider_priority. The provider with the highest priority - (which would be selected by default) comes first. - """ - - providers = {} - - apkbuild = get(args, provide) - for subpkgname, subpkg in apkbuild["subpackages"].items(): - for provides in subpkg["provides"]: - # Strip provides version (=$pkgver-r$pkgrel) - if provides.split("=", 1)[0] == provide: - providers[subpkgname] = subpkg - - return sorted(providers.items(), reverse=True, - key=lambda p: p[1].get('provider_priority', 0)) - - -def get_repo(args, pkgname, must_exist=True): - """ Get the repository folder of an aport. - - :pkgname: package name - :must_exist: raise an exception when it can't be found - :returns: a string like "main", "device", "cross", ... - or None when the aport could not be found """ - aport = find(args, pkgname, must_exist) - if not aport: - return None - return os.path.basename(os.path.dirname(aport)) - - -def check_arches(arches, arch): - """ Check if building for a certain arch is allowed. - - :param arches: list of all supported arches, as it can be found in the - arch="" line of APKBUILDS (including all, noarch, - !arch, ...). For example: ["x86_64", "x86", "!armhf"] - :param arch: the architecture to check for - :returns: True when building is allowed, False otherwise - """ - if "!" + arch in arches: - return False - for value in [arch, "all", "noarch"]: - if value in arches: - return True - return False - - -def get_channel_new(channel): - """ Translate legacy channel names to the new ones. Legacy names are still - supported for compatibility with old branches (pmb#2015). - :param channel: name as read from pmaports.cfg or channels.cfg, like - "edge", "v21.03" etc., or potentially a legacy name - like "stable". - :returns: name in the new format, e.g. "edge" or "v21.03" - """ - legacy_cfg = pmb.config.pmaports_channels_legacy - if channel in legacy_cfg: - ret = legacy_cfg[channel] - logging.verbose(f"Legacy channel '{channel}' translated to '{ret}'") - return ret - return channel diff --git a/pmb/helpers/repo.py b/pmb/helpers/repo.py deleted file mode 100644 index 76d09be8..00000000 --- a/pmb/helpers/repo.py +++ /dev/null @@ -1,219 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" -Functions that work with binary package repos. See also: -- pmb/helpers/pmaports.py (work with pmaports) -- pmb/helpers/package.py (work with both) -""" -import os -import hashlib -import logging -import pmb.config.pmaports -import pmb.helpers.http -import pmb.helpers.run - - -def hash(url, length=8): - """ - Generate the hash that APK adds to the APKINDEX and apk packages - in its apk cache folder. It is the "12345678" part in this example: - "APKINDEX.12345678.tar.gz". - - :param length: The length of the hash in the output file. 
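check_arches() above implements the arch="" semantics of APKBUILDs. A compact sketch of the same rules with example inputs:

def check_arches_sketch(arches, arch):
    # Same rules as check_arches() above: an explicit "!arch" always wins,
    # otherwise the arch itself, "all" or "noarch" allow building.
    if "!" + arch in arches:
        return False
    return any(value in arches for value in (arch, "all", "noarch"))

print(check_arches_sketch(["x86_64", "x86", "!armhf"], "armhf"))  # False
print(check_arches_sketch(["noarch", "!armhf"], "armhf"))         # False
print(check_arches_sketch(["all"], "aarch64"))                    # True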
- - See also: official implementation in apk-tools: - - - blob.c: apk_blob_push_hexdump(), "const char *xd" - apk_defines.h: APK_CACHE_CSUM_BYTES - database.c: apk_repo_format_cache_index() - """ - binary = hashlib.sha1(url.encode("utf-8")).digest() - xd = "0123456789abcdefghijklmnopqrstuvwxyz" - csum_bytes = int(length / 2) - - ret = "" - for i in range(csum_bytes): - ret += xd[(binary[i] >> 4) & 0xf] - ret += xd[binary[i] & 0xf] - - return ret - - -def urls(args, user_repository=True, postmarketos_mirror=True, alpine=True): - """ - Get a list of repository URLs, as they are in /etc/apk/repositories. - :param user_repository: add /mnt/pmbootstrap/packages - :param postmarketos_mirror: add postmarketos mirror URLs - :param alpine: add alpine mirror URLs - :returns: list of mirror strings, like ["/mnt/pmbootstrap/packages", - "http://...", ...] - """ - ret = [] - - # Get mirrordirs from channels.cfg (postmarketOS mirrordir is the same as - # the pmaports branch of the channel, no need to make it more complicated) - channel_cfg = pmb.config.pmaports.read_config_channel(args) - mirrordir_pmos = channel_cfg["branch_pmaports"] - mirrordir_alpine = channel_cfg["mirrordir_alpine"] - - # Local user repository (for packages compiled with pmbootstrap) - if user_repository: - ret.append("/mnt/pmbootstrap/packages") - - # Upstream postmarketOS binary repository - if postmarketos_mirror: - for mirror in args.mirrors_postmarketos: - # Remove "master" mirrordir to avoid breakage until bpo is adjusted - # (build.postmarketos.org#63) and to give potential other users of - # this flag a heads up. - if mirror.endswith("/master"): - logging.warning("WARNING: 'master' at the end of" - " --mirror-pmOS is deprecated, the branch gets" - " added automatically now!") - mirror = mirror[:-1 * len("master")] - ret.append(f"{mirror}{mirrordir_pmos}") - - # Upstream Alpine Linux repositories - if alpine: - directories = ["main", "community"] - if mirrordir_alpine == "edge": - directories.append("testing") - for dir in directories: - ret.append(f"{args.mirror_alpine}{mirrordir_alpine}/{dir}") - return ret - - -def apkindex_files(args, arch=None, user_repository=True, pmos=True, - alpine=True): - """ - Get a list of outside paths to all resolved APKINDEX.tar.gz files for a - specific arch. - :param arch: defaults to native - :param user_repository: add path to index of locally built packages - :param pmos: add paths to indexes of postmarketos mirrors - :param alpine: add paths to indexes of alpine mirrors - :returns: list of absolute APKINDEX.tar.gz file paths - """ - if not arch: - arch = pmb.config.arch_native - - ret = [] - # Local user repository (for packages compiled with pmbootstrap) - if user_repository: - channel = pmb.config.pmaports.read_config(args)["channel"] - ret = [f"{args.work}/packages/{channel}/{arch}/APKINDEX.tar.gz"] - - # Resolve the APKINDEX.$HASH.tar.gz files - for url in urls(args, False, pmos, alpine): - ret.append(args.work + "/cache_apk_" + arch + "/APKINDEX." + - hash(url) + ".tar.gz") - - return ret - - -def update(args, arch=None, force=False, existing_only=False): - """ - Download the APKINDEX files for all URLs depending on the architectures. - - :param arch: * one Alpine architecture name ("x86_64", "armhf", ...) 
- * None for all architectures - :param force: even update when the APKINDEX file is fairly recent - :param existing_only: only update the APKINDEX files that already exist, - this is used by "pmbootstrap update" - - :returns: True when files have been downloaded, False otherwise - """ - # Skip in offline mode, only show once - cache_key = "pmb.helpers.repo.update" - if args.offline: - if not pmb.helpers.other.cache[cache_key]["offline_msg_shown"]: - logging.info("NOTE: skipping package index update (offline mode)") - pmb.helpers.other.cache[cache_key]["offline_msg_shown"] = True - return False - - # Architectures and retention time - architectures = [arch] if arch else pmb.config.build_device_architectures - retention_hours = pmb.config.apkindex_retention_time - retention_seconds = retention_hours * 3600 - - # Find outdated APKINDEX files. Formats: - # outdated: {URL: apkindex_path, ... } - # outdated_arches: ["armhf", "x86_64", ... ] - outdated = {} - outdated_arches = [] - for url in urls(args, False): - for arch in architectures: - # APKINDEX file name from the URL - url_full = url + "/" + arch + "/APKINDEX.tar.gz" - cache_apk_outside = args.work + "/cache_apk_" + arch - apkindex = cache_apk_outside + "/APKINDEX." + hash(url) + ".tar.gz" - - # Find update reason, possibly skip non-existing or known 404 files - reason = None - if url_full in pmb.helpers.other.cache[cache_key]["404"]: - # We already attempted to download this file once in this - # session - continue - elif not os.path.exists(apkindex): - if existing_only: - continue - reason = "file does not exist yet" - elif force: - reason = "forced update" - elif pmb.helpers.file.is_older_than(apkindex, retention_seconds): - reason = "older than " + str(retention_hours) + "h" - if not reason: - continue - - # Update outdated and outdated_arches - logging.debug("APKINDEX outdated (" + reason + "): " + url_full) - outdated[url_full] = apkindex - if arch not in outdated_arches: - outdated_arches.append(arch) - - # Bail out or show log message - if not len(outdated): - return False - logging.info("Update package index for " + ", ".join(outdated_arches) + - " (" + str(len(outdated)) + " file(s))") - - # Download and move to right location - for (i, (url, target)) in enumerate(outdated.items()): - pmb.helpers.cli.progress_print(args, i / len(outdated)) - temp = pmb.helpers.http.download(args, url, "APKINDEX", False, - logging.DEBUG, True) - if not temp: - pmb.helpers.other.cache[cache_key]["404"].append(url) - continue - target_folder = os.path.dirname(target) - if not os.path.exists(target_folder): - pmb.helpers.run.root(args, ["mkdir", "-p", target_folder]) - pmb.helpers.run.root(args, ["cp", temp, target]) - pmb.helpers.cli.progress_flush(args) - - return True - - -def alpine_apkindex_path(args, repo="main", arch=None): - """ - Get the path to a specific Alpine APKINDEX file on disk and download it if - necessary. - - :param repo: Alpine repository name (e.g. "main") - :param arch: Alpine architecture (e.g. "armhf"), defaults to native arch. 
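hash() further above determines the APKINDEX.<hash>.tar.gz file names that urls(), apkindex_files() and update() all rely on. A standalone sketch of that naming (the mirror URL is only an example value):

import hashlib

def apkindex_cache_name_sketch(url, length=8):
    # Reproduces the naming from hash() above: sha1 of the repository URL,
    # rendered as <length> hex digits, used as APKINDEX.<hash>.tar.gz.
    binary = hashlib.sha1(url.encode("utf-8")).digest()
    xd = "0123456789abcdefghijklmnopqrstuvwxyz"
    ret = ""
    for i in range(length // 2):
        ret += xd[(binary[i] >> 4) & 0xf]
        ret += xd[binary[i] & 0xf]
    return f"APKINDEX.{ret}.tar.gz"

# Every repository URL maps to one stable file name inside the per-arch
# apk cache folder:
print(apkindex_cache_name_sketch("http://dl-cdn.alpinelinux.org/alpine/edge/main"))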
- :returns: full path to the APKINDEX file - """ - # Repo sanity check - if repo not in ["main", "community", "testing", "non-free"]: - raise RuntimeError("Invalid Alpine repository: " + repo) - - # Download the file - arch = arch or pmb.config.arch_native - update(args, arch) - - # Find it on disk - channel_cfg = pmb.config.pmaports.read_config_channel(args) - repo_link = f"{args.mirror_alpine}{channel_cfg['mirrordir_alpine']}/{repo}" - cache_folder = args.work + "/cache_apk_" + arch - return cache_folder + "/APKINDEX." + hash(repo_link) + ".tar.gz" diff --git a/pmb/helpers/repo_missing.py b/pmb/helpers/repo_missing.py deleted file mode 100644 index 294fed63..00000000 --- a/pmb/helpers/repo_missing.py +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging - -import pmb.build -import pmb.helpers.package -import pmb.helpers.pmaports - - -def filter_missing_packages(args, arch, pkgnames): - """ Create a subset of pkgnames with missing or outdated binary packages. - - :param arch: architecture (e.g. "armhf") - :param pkgnames: list of package names (e.g. ["hello-world", "test12"]) - :returns: subset of pkgnames (e.g. ["hello-world"]) """ - ret = [] - for pkgname in pkgnames: - binary = pmb.parse.apkindex.package(args, pkgname, arch, False) - must_exist = False if binary else True - pmaport = pmb.helpers.pmaports.get(args, pkgname, must_exist) - if pmaport and pmb.build.is_necessary(args, arch, pmaport): - ret.append(pkgname) - return ret - - -def filter_aport_packages(args, arch, pkgnames): - """ Create a subset of pkgnames where each one has an aport. - - :param arch: architecture (e.g. "armhf") - :param pkgnames: list of package names (e.g. ["hello-world", "test12"]) - :returns: subset of pkgnames (e.g. ["hello-world"]) """ - ret = [] - for pkgname in pkgnames: - if pmb.helpers.pmaports.find(args, pkgname, False): - ret += [pkgname] - return ret - - -def filter_arch_packages(args, arch, pkgnames): - """ Create a subset of pkgnames with packages removed that can not be - built for a certain arch. - - :param arch: architecture (e.g. "armhf") - :param pkgnames: list of package names (e.g. ["hello-world", "test12"]) - :returns: subset of pkgnames (e.g. ["hello-world"]) """ - ret = [] - for pkgname in pkgnames: - if pmb.helpers.package.check_arch(args, pkgname, arch, False): - ret += [pkgname] - return ret - - -def get_relevant_packages(args, arch, pkgname=None, built=False): - """ Get all packages that can be built for the architecture in question. - - :param arch: architecture (e.g. 
"armhf") - :param pkgname: only look at a specific package (and its dependencies) - :param built: include packages that have already been built - :returns: an alphabetically sorted list of pkgnames, e.g.: - ["devicepkg-dev", "hello-world", "osk-sdl"] """ - if pkgname: - if not pmb.helpers.package.check_arch(args, pkgname, arch, False): - raise RuntimeError(pkgname + " can't be built for " + arch + ".") - ret = pmb.helpers.package.depends_recurse(args, pkgname, arch) - else: - ret = pmb.helpers.pmaports.get_list(args) - ret = filter_arch_packages(args, arch, ret) - if built: - ret = filter_aport_packages(args, arch, ret) - if not len(ret): - logging.info("NOTE: no aport found for any package in the" - " dependency tree, it seems they are all provided by" - " upstream (Alpine).") - else: - ret = filter_missing_packages(args, arch, ret) - if not len(ret): - logging.info("NOTE: all relevant packages are up to date, use" - " --built to include the ones that have already been" - " built.") - - # Sort alphabetically (to get a deterministic build order) - ret.sort() - return ret - - -def generate_output_format(args, arch, pkgnames): - """ Generate the detailed output format. - :param arch: architecture - :param pkgnames: list of package names that should be in the output, - e.g.: ["hello-world", "pkg-depending-on-hello-world"] - :returns: a list like the following: - [{"pkgname": "hello-world", - "repo": "main", - "version": "1-r4", - "depends": []}, - {"pkgname": "pkg-depending-on-hello-world", - "version": "0.5-r0", - "repo": "main", - "depends": ["hello-world"]}] """ - ret = [] - for pkgname in pkgnames: - entry = pmb.helpers.package.get(args, pkgname, arch, True) - ret += [{"pkgname": entry["pkgname"], - "repo": pmb.helpers.pmaports.get_repo(args, pkgname), - "version": entry["version"], - "depends": entry["depends"]}] - return ret - - -def generate(args, arch, overview, pkgname=None, built=False): - """ Get packages that need to be built, with all their dependencies. - - :param arch: architecture (e.g. "armhf") - :param pkgname: only look at a specific package - :param built: include packages that have already been built - :returns: a list like the following: - [{"pkgname": "hello-world", - "repo": "main", - "version": "1-r4"}, - {"pkgname": "package-depending-on-hello-world", - "version": "0.5-r0", - "repo": "main"}] - """ - # Log message - packages_str = pkgname if pkgname else "all packages" - logging.info("Calculate packages that need to be built ({}, {})" - "".format(packages_str, arch)) - - # Order relevant packages - ret = get_relevant_packages(args, arch, pkgname, built) - - # Output format - if overview: - return ret - return generate_output_format(args, arch, ret) diff --git a/pmb/helpers/run.py b/pmb/helpers/run.py deleted file mode 100644 index 7ffe00c1..00000000 --- a/pmb/helpers/run.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.helpers.run_core - - -def user(args, cmd, working_dir=None, output="log", output_return=False, - check=None, env={}, sudo=False): - """ - Run a command on the host system as user. - - :param env: dict of environment variables to be passed to the command, e.g. - {"JOBS": "5"} - - See pmb.helpers.run_core.core() for a detailed description of all other - arguments and the return value. 
- """ - # Readable log message (without all the escaping) - msg = "% " - for key, value in env.items(): - msg += key + "=" + value + " " - if working_dir: - msg += "cd " + working_dir + "; " - msg += " ".join(cmd) - - # Add environment variables and run - if env: - cmd = ["sh", "-c", pmb.helpers.run_core.flat_cmd(cmd, env=env)] - return pmb.helpers.run_core.core(args, msg, cmd, working_dir, output, - output_return, check, sudo) - - -def root(args, cmd, working_dir=None, output="log", output_return=False, - check=None, env={}): - """ - Run a command on the host system as root, with sudo or doas. - - :param env: dict of environment variables to be passed to the command, e.g. - {"JOBS": "5"} - - See pmb.helpers.run_core.core() for a detailed description of all other - arguments and the return value. - """ - if env: - cmd = ["sh", "-c", pmb.helpers.run_core.flat_cmd(cmd, env=env)] - cmd = pmb.config.sudo(cmd) - - return user(args, cmd, working_dir, output, output_return, check, env, - True) diff --git a/pmb/helpers/run_core.py b/pmb/helpers/run_core.py deleted file mode 100644 index 10c52afb..00000000 --- a/pmb/helpers/run_core.py +++ /dev/null @@ -1,393 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import fcntl -import logging -import os -import selectors -import shlex -import subprocess -import sys -import threading -import time -import pmb.helpers.run - -""" For a detailed description of all output modes, read the description of - core() at the bottom. All other functions in this file get (indirectly) - called by core(). """ - - -def flat_cmd(cmd, working_dir=None, env={}): - """ - Convert a shell command passed as list into a flat shell string with - proper escaping. - - :param cmd: command as list, e.g. ["echo", "string with spaces"] - :param working_dir: when set, prepend "cd ...;" to execute the command - in the given working directory - :param env: dict of environment variables to be passed to the command, e.g. - {"JOBS": "5"} - :returns: the flat string, e.g. - echo 'string with spaces' - cd /home/pmos;echo 'string with spaces' - """ - # Merge env and cmd into escaped list - escaped = [] - for key, value in env.items(): - escaped.append(key + "=" + shlex.quote(value)) - for i in range(len(cmd)): - escaped.append(shlex.quote(cmd[i])) - - # Prepend working dir - ret = " ".join(escaped) - if working_dir: - ret = "cd " + shlex.quote(working_dir) + ";" + ret - - return ret - - -def sanity_checks(output="log", output_return=False, check=None): - """ - Raise an exception if the parameters passed to core() don't make sense - (all parameters are described in core() below). - """ - vals = ["log", "stdout", "interactive", "tui", "background", "pipe"] - if output not in vals: - raise RuntimeError("Invalid output value: " + str(output)) - - # Prevent setting the check parameter with output="background". - # The exit code won't be checked when running in background, so it would - # always by check=False. But we prevent it from getting set to check=False - # as well, so it does not look like you could change it to check=True. - if check is not None and output == "background": - raise RuntimeError("Can't use check with output: background") - - if output_return and output in ["tui", "background"]: - raise RuntimeError("Can't use output_return with output: " + output) - - -def background(cmd, working_dir=None): - """ Run a subprocess in background and redirect its output to the log. 
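flat_cmd() above is the escaping primitive both run helpers build on. A sketch of the same flattening with example arguments:

import shlex

def flat_cmd_sketch(cmd, working_dir=None, env={}):
    # Same flattening as flat_cmd() above: KEY=value assignments, the
    # escaped command, and an optional "cd ...;" prefix.
    escaped = [key + "=" + shlex.quote(value) for key, value in env.items()]
    escaped += [shlex.quote(part) for part in cmd]
    ret = " ".join(escaped)
    if working_dir:
        ret = "cd " + shlex.quote(working_dir) + ";" + ret
    return ret

print(flat_cmd_sketch(["echo", "string with spaces"], env={"JOBS": "5"}))
# JOBS=5 echo 'string with spaces'
print(flat_cmd_sketch(["ls", "-l"], working_dir="/tmp"))
# cd /tmp;ls -l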
""" - ret = subprocess.Popen(cmd, stdout=pmb.helpers.logging.logfd, - stderr=pmb.helpers.logging.logfd, cwd=working_dir) - logging.debug(f"New background process: pid={ret.pid}, output=background") - return ret - - -def pipe(cmd, working_dir=None): - """ Run a subprocess in background and redirect its output to a pipe. """ - ret = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stdin=subprocess.DEVNULL, - stderr=pmb.helpers.logging.logfd, cwd=working_dir) - logging.verbose(f"New background process: pid={ret.pid}, output=pipe") - return ret - - -def pipe_read(process, output_to_stdout=False, output_return=False, - output_return_buffer=False): - """ - Read all available output from a subprocess and copy it to the log and - optionally stdout and a buffer variable. This is only meant to be called by - foreground_pipe() below. - - :param process: subprocess.Popen instance - :param output_to_stdout: copy all output to pmbootstrap's stdout - :param output_return: when set to True, output_return_buffer will be - extended - :param output_return_buffer: list of bytes that gets extended with the - current output in case output_return is True. - """ - while True: - # Copy available output - out = process.stdout.readline() - if len(out): - pmb.helpers.logging.logfd.buffer.write(out) - if output_to_stdout: - sys.stdout.buffer.write(out) - if output_return: - output_return_buffer.append(out) - continue - - # No more output (flush buffers) - pmb.helpers.logging.logfd.flush() - if output_to_stdout: - sys.stdout.flush() - return - - -def kill_process_tree(args, pid, ppids, sudo): - """ - Recursively kill a pid and its child processes - - :param pid: process id that will be killed - :param ppids: list of process id and parent process id tuples (pid, ppid) - :param sudo: use sudo to kill the process - """ - if sudo: - pmb.helpers.run.root(args, ["kill", "-9", str(pid)], - check=False) - else: - pmb.helpers.run.user(args, ["kill", "-9", str(pid)], - check=False) - - for (child_pid, child_ppid) in ppids: - if child_ppid == str(pid): - kill_process_tree(args, child_pid, ppids, sudo) - - -def kill_command(args, pid, sudo): - """ - Kill a command process and recursively kill its child processes - - :param pid: process id that will be killed - :param sudo: use sudo to kill the process - """ - cmd = ["ps", "-e", "-o", "pid,ppid"] - ret = subprocess.run(cmd, check=True, stdout=subprocess.PIPE) - ppids = [] - proc_entries = ret.stdout.decode("utf-8").rstrip().split('\n')[1:] - for row in proc_entries: - items = row.split() - if len(items) != 2: - raise RuntimeError("Unexpected ps output: " + row) - ppids.append(items) - - kill_process_tree(args, pid, ppids, sudo) - - -def foreground_pipe(args, cmd, working_dir=None, output_to_stdout=False, - output_return=False, output_timeout=True, - sudo=False, stdin=None): - """ - Run a subprocess in foreground with redirected output and optionally kill - it after being silent for too long. - - :param cmd: command as list, e.g. 
["echo", "string with spaces"] - :param working_dir: path in host system where the command should run - :param output_to_stdout: copy all output to pmbootstrap's stdout - :param output_return: return the output of the whole program - :param output_timeout: kill the process when it doesn't print any output - after a certain time (configured with --timeout) - and raise a RuntimeError exception - :param sudo: use sudo to kill the process when it hits the timeout - :returns: (code, output) - * code: return code of the program - * output: "" - * output: full program output string (output_return is True) - """ - # Start process in background (stdout and stderr combined) - process = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, cwd=working_dir, - stdin=stdin) - - # Make process.stdout non-blocking - handle = process.stdout.fileno() - flags = fcntl.fcntl(handle, fcntl.F_GETFL) - fcntl.fcntl(handle, fcntl.F_SETFL, flags | os.O_NONBLOCK) - - # While process exists wait for output (with timeout) - output_buffer = [] - sel = selectors.DefaultSelector() - sel.register(process.stdout, selectors.EVENT_READ) - timeout = args.timeout if output_timeout else None - while process.poll() is None: - wait_start = time.perf_counter() if output_timeout else None - sel.select(timeout) - - # On timeout raise error (we need to measure time on our own, because - # select() may exit early even if there is no data to read and the - # timeout was not reached.) - if output_timeout: - wait_end = time.perf_counter() - if wait_end - wait_start >= args.timeout: - logging.info("Process did not write any output for " + - str(args.timeout) + " seconds. Killing it.") - logging.info("NOTE: The timeout can be increased with" - " 'pmbootstrap -t'.") - kill_command(args, process.pid, sudo) - continue - - # Read all currently available output - pipe_read(process, output_to_stdout, output_return, - output_buffer) - - # There may still be output after the process quit - pipe_read(process, output_to_stdout, output_return, output_buffer) - - # Return the return code and output (the output gets built as list of - # output chunks and combined at the end, this is faster than extending the - # combined string with each new chunk) - return (process.returncode, b"".join(output_buffer).decode("utf-8")) - - -def foreground_tui(cmd, working_dir=None): - """ - Run a subprocess in foreground without redirecting any of its output. - - This is the only way text-based user interfaces (ncurses programs like - vim, nano or the kernel's menuconfig) work properly. - """ - - logging.debug("*** output passed to pmbootstrap stdout, not to this log" - " ***") - process = subprocess.Popen(cmd, cwd=working_dir) - return process.wait() - - -def check_return_code(args, code, log_message): - """ - Check the return code of a command. - - :param code: exit code to check - :param log_message: simplified and more readable form of the command, e.g. - "(native) % echo test" instead of the full command with - entering the chroot and more escaping - :raises RuntimeError: when the code indicates that the command failed - """ - - if code: - logging.debug("^" * 70) - logging.info("NOTE: The failed command's output is above the ^^^ line" - " in the log file: " + args.log) - raise RuntimeError(f"Command failed (exit code {str(code)}): " + - log_message) - - -def sudo_timer_iterate(): - """ - Run sudo -v and schedule a new timer to repeat the same. 
- """ - - if pmb.config.which_sudo() == "sudo": - subprocess.Popen(["sudo", "-v"]).wait() - else: - subprocess.Popen(pmb.config.sudo(["true"])).wait() - - timer = threading.Timer(interval=60, function=sudo_timer_iterate) - timer.daemon = True - timer.start() - - -def sudo_timer_start(): - """ - Start a timer to call sudo -v periodically, so that the password is only - needed once. - """ - - if "sudo_timer_active" in pmb.helpers.other.cache: - return - pmb.helpers.other.cache["sudo_timer_active"] = True - - sudo_timer_iterate() - - -def core(args, log_message, cmd, working_dir=None, output="log", - output_return=False, check=None, sudo=False, disable_timeout=False): - """ - Run a command and create a log entry. - - This is a low level function not meant to be used directly. Use one of the - following instead: pmb.helpers.run.user(), pmb.helpers.run.root(), - pmb.chroot.user(), pmb.chroot.root() - - :param log_message: simplified and more readable form of the command, e.g. - "(native) % echo test" instead of the full command with - entering the chroot and more escaping - :param cmd: command as list, e.g. ["echo", "string with spaces"] - :param working_dir: path in host system where the command should run - :param output: where to write the output (stdout and stderr) of the - process. We almost always write to the log file, which can - be read with "pmbootstrap log" (output values: "log", - "stdout", "interactive", "background"), so it's easy to - trace what pmbootstrap does. - - The exceptions are "tui" (text-based user interface), where - it does not make sense to write to the log file (think of - ncurses UIs, such as "menuconfig") and "pipe" where the - output is written to a pipe for manual asynchronous - consumption by the caller. - - When the output is not set to "interactive", "tui", - "background" or "pipe", we kill the process if it does not - output anything for 5 minutes (time can be set with - "pmbootstrap --timeout"). - - The table below shows all possible values along with - their properties. "wait" indicates that we wait for the - process to complete. - - output value | timeout | out to log | out to stdout | wait | pass stdin - ------------------------------------------------------------------------ - "log" | x | x | | x | - "stdout" | x | x | x | x | - "interactive" | | x | x | x | x - "tui" | | | x | x | x - "background" | | x | | | - "pipe" | | | | | - - :param output_return: in addition to writing the program's output to the - destinations above in real time, write to a buffer - and return it as string when the command has - completed. This is not possible when output is - "background", "pipe" or "tui". - :param check: an exception will be raised when the command's return code - is not 0. Set this to False to disable the check. This - parameter can not be used when the output is "background" or - "pipe". - :param sudo: use sudo to kill the process when it hits the timeout. 
- :returns: * program's return code (default) - * subprocess.Popen instance (output is "background" or "pipe") - * the program's entire output (output_return is True) - """ - sanity_checks(output, output_return, check) - - # Preserve proxy environment variables - env = {} - for var in ["FTP_PROXY", "ftp_proxy", "HTTP_PROXY", "http_proxy", - "HTTPS_PROXY", "https_proxy", "HTTP_PROXY_AUTH"]: - if var in os.environ: - env[var] = os.environ[var] - if env: - cmd = ["sh", "-c", flat_cmd(cmd, env=env)] - - if args.sudo_timer and sudo: - sudo_timer_start() - - # Log simplified and full command (pmbootstrap -v) - logging.debug(log_message) - logging.verbose("run: " + str(cmd)) - - # Background - if output == "background": - return background(cmd, working_dir) - - # Pipe - if output == "pipe": - return pipe(cmd, working_dir) - - # Foreground - output_after_run = "" - if output == "tui": - # Foreground TUI - code = foreground_tui(cmd, working_dir) - else: - # Foreground pipe (always redirects to the error log file) - output_to_stdout = False - if not args.details_to_stdout and output in ["stdout", "interactive"]: - output_to_stdout = True - - output_timeout = output in ["log", "stdout"] and not disable_timeout - - stdin = subprocess.DEVNULL if output in ["log", "stdout"] else None - - (code, output_after_run) = foreground_pipe(args, cmd, working_dir, - output_to_stdout, - output_return, - output_timeout, - sudo, stdin) - - # Check the return code - if check is not False: - check_return_code(args, code, log_message) - - # Return (code or output string) - return output_after_run if output_return else code diff --git a/pmb/helpers/status.py b/pmb/helpers/status.py deleted file mode 100644 index 657c27a3..00000000 --- a/pmb/helpers/status.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging - -import pmb.config -import pmb.config.workdir -import pmb.helpers.git - - -def print_config(args): - """ Print an overview of what was set in "pmbootstrap init". """ - logging.info("*** CONFIG ***") - info = args.deviceinfo - logging.info("Device: {} ({}, \"{}\")" - .format(args.device, info["arch"], info["name"])) - - if pmb.parse._apkbuild.kernels(args, args.device): - logging.info("Kernel: " + args.kernel) - - if args.extra_packages != "none": - logging.info("Extra packages: {}".format(args.extra_packages)) - - logging.info("User Interface: {}".format(args.ui)) - - -def print_git_repos(args): - logging.info("*** GIT REPOS ***") - logging.info("Path: {}/cache_git".format(args.work)) - for repo in pmb.config.git_repos.keys(): - path = pmb.helpers.git.get_path(args, repo) - if not os.path.exists(path): - continue - - # Get branch name (if on branch) or current commit - ref = pmb.helpers.git.rev_parse(args, path, - extra_args=["--abbrev-ref"]) - if ref == "HEAD": - ref = pmb.helpers.git.rev_parse(args, path)[0:8] - - logging.info("- {} ({})".format(repo, ref)) - - -def print_checks_git_repo(args, repo, details=True): - """ Perform various checks on one checked out git repo. 
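# Illustrative sketch (not from the original file): core() above forwards the
# host's proxy settings by collecting the usual *_PROXY variables and folding
# them into the command via flat_cmd(). The selection step on its own:
import os
import shlex

PROXY_VARS = ["FTP_PROXY", "ftp_proxy", "HTTP_PROXY", "http_proxy",
              "HTTPS_PROXY", "https_proxy", "HTTP_PROXY_AUTH"]

def with_proxy_env(cmd):
    env = {var: os.environ[var] for var in PROXY_VARS if var in os.environ}
    if not env:
        return cmd
    assignments = " ".join(k + "=" + shlex.quote(v) for k, v in env.items())
    return ["sh", "-c", assignments + " " +
            " ".join(shlex.quote(part) for part in cmd)]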
- :param details: if True, print each passing check (this is True by - default for the testsuite) - :returns: status, todo_msg - - status: integer, 0 if all passed, < 0 on failure - - msg_todo: message to help the user resolve the failure """ - def log_ok(msg_ok): - if details: - logging.info("[OK ] {}: {}".format(repo, msg_ok)) - - def log_nok_ret(status, msg_nok, msg_todo): - logging.warning("[NOK] {}: {}".format(repo, msg_nok)) - return (status, msg_todo) - - # On official branch - path = pmb.helpers.git.get_path(args, repo) - branches = pmb.helpers.git.get_branches_official(args, repo) - ref = pmb.helpers.git.rev_parse(args, path, extra_args=["--abbrev-ref"]) - if ref not in branches: - return log_nok_ret(-1, "not on official channel branch", - "consider checking out: " + ", ".join(branches)) - log_ok("on official channel branch") - - # Workdir clean - if not pmb.helpers.git.clean_worktree(args, path): - return log_nok_ret(-2, "workdir is not clean", - "consider cleaning your workdir") - log_ok("workdir is clean") - - # Tracking proper remote - remote_upstream = pmb.helpers.git.get_upstream_remote(args, repo) - branch_upstream = remote_upstream + "/" + ref - remote_ref = pmb.helpers.git.rev_parse(args, path, ref + "@{u}", - ["--abbrev-ref"]) - if remote_ref != branch_upstream: - return log_nok_ret(-3, "tracking unexpected remote branch", - "consider tracking remote branch '{}' instead of" - " '{}'".format(branch_upstream, remote_ref)) - log_ok("tracking proper remote branch '{}'".format(branch_upstream)) - - # Up to date - ref_branch = pmb.helpers.git.rev_parse(args, path, ref) - ref_branch_upstream = pmb.helpers.git.rev_parse(args, path, - branch_upstream) - if ref_branch != ref_branch_upstream: - return log_nok_ret(-4, "not up to date with remote branch", - "update with 'pmbootstrap pull'") - log_ok("up to date with remote branch") - - # Outdated remote information - if pmb.helpers.git.is_outdated(path): - return log_nok_ret(-5, "outdated remote information", - "update with 'pmbootstrap pull'") - log_ok("remote information updated recently (via git fetch/pull)") - - return (0, "") - - -def print_checks_git_repos(args, details): - """ Perform various checks on the checked out git repos. - :param details: if True, print each passing check - :returns: list of unresolved checklist items """ - ret = [] - for repo in pmb.config.git_repos.keys(): - path = pmb.helpers.git.get_path(args, repo) - if not os.path.exists(path): - continue - status, todo_msg = print_checks_git_repo(args, repo, details) - if status: - ret += ["{}: {}".format(repo, todo_msg)] - return ret - - -def print_checks_chroots_outdated(args, details): - """ Check if chroots were zapped recently. - :param details: if True, print each passing check instead of a summary - :returns: list of unresolved checklist items """ - if pmb.config.workdir.chroots_outdated(args): - logging.info("[NOK] Chroots not zapped recently") - return ["Run 'pmbootstrap zap' to delete possibly outdated chroots"] - elif details: - logging.info("[OK ] Chroots zapped recently (or non-existing)") - return [] - - -def print_checks(args, details): - """ :param details: if True, print each passing check instead of a summary - :returns: True if all checks passed, False otherwise """ - logging.info("*** CHECKS ***") - checklist = [] - checklist += print_checks_chroots_outdated(args, details) - checklist += print_checks_git_repos(args, details) - - # All OK - if not checklist: - if not details: - logging.info("All checks passed! 
\\o/") - logging.info("") - return True - - # Some NOK: print checklist - logging.info("") - logging.info("*** CHECKLIST ***") - for item in checklist: - logging.info("- " + item) - logging.info("- Run 'pmbootstrap status' to verify that all is resolved") - return False - - -def print_status(args, details=False): - """ :param details: if True, print each passing check instead of a summary - :returns: True if all checks passed, False otherwise """ - print_config(args) - logging.info("") - print_git_repos(args) - logging.info("") - ret = print_checks(args, details) - - return ret diff --git a/pmb/helpers/ui.py b/pmb/helpers/ui.py deleted file mode 100644 index 039be55d..00000000 --- a/pmb/helpers/ui.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2023 Clayton Craft -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import glob -import pmb.parse - - -def list(args, arch): - """ - Get all UIs, for which aports are available with their description. - - :param arch: device architecture, for which the UIs must be available - :returns: [("none", "No graphical..."), ("weston", "Wayland reference...")] - """ - ret = [("none", "Bare minimum OS image for testing and manual" - " customization. The \"console\" UI should be selected if" - " a graphical UI is not desired.")] - for path in sorted(glob.glob(args.aports + "/main/postmarketos-ui-*")): - apkbuild = pmb.parse.apkbuild(f"{path}/APKBUILD") - ui = os.path.basename(path).split("-", 2)[2] - if pmb.helpers.package.check_arch(args, apkbuild["pkgname"], arch): - ret.append((ui, apkbuild["pkgdesc"])) - return ret diff --git a/pmb/install/__init__.py b/pmb/install/__init__.py deleted file mode 100644 index d44288ce..00000000 --- a/pmb/install/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -from pmb.install._install import install -from pmb.install._install import get_kernel_package -from pmb.install.partition import partition -from pmb.install.partition import partition_cgpt -from pmb.install.format import format -from pmb.install.format import get_root_filesystem -from pmb.install.partition import partitions_mount diff --git a/pmb/install/_install.py b/pmb/install/_install.py deleted file mode 100644 index 59fdf8e0..00000000 --- a/pmb/install/_install.py +++ /dev/null @@ -1,1277 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import re -import glob -import shlex -import sys - -import pmb.chroot -import pmb.chroot.apk -import pmb.chroot.other -import pmb.chroot.initfs -import pmb.config -import pmb.config.pmaports -import pmb.helpers.devices -import pmb.helpers.run -import pmb.install.blockdevice -import pmb.install.recovery -import pmb.install.ui -import pmb.install - - -def mount_device_rootfs(args, suffix_rootfs, suffix_mount="native"): - """ - Mount the device rootfs. - :param suffix_rootfs: the chroot suffix, where the rootfs that will be - installed on the device has been created (e.g. - "rootfs_qemu-amd64") - :param suffix_mount: the chroot suffix, where the device rootfs will be - mounted (e.g. "native") - """ - mountpoint = f"/mnt/{suffix_rootfs}" - pmb.helpers.mount.bind(args, f"{args.work}/chroot_{suffix_rootfs}", - f"{args.work}/chroot_{suffix_mount}{mountpoint}") - return mountpoint - - -def get_subpartitions_size(args, suffix): - """ - Calculate the size of the boot and root subpartition. - - :param suffix: the chroot suffix, e.g. 
"rootfs_qemu-amd64" - :returns: (boot, root) the size of the boot and root - partition as integer in MiB - """ - boot = int(args.boot_size) - - # Estimate root partition size, then add some free space. The size - # calculation is not as trivial as one may think, and depending on the - # file system etc it seems to be just impossible to get it right. - chroot = f"{args.work}/chroot_{suffix}" - root = pmb.helpers.other.folder_size(args, chroot) / 1024 - root *= 1.20 - root += 50 + int(args.extra_space) - return (boot, root) - - -def get_nonfree_packages(args, device): - """ - Get the non-free packages based on user's choice in "pmbootstrap init" and - based on whether there are non-free packages in the APKBUILD or not. - - :returns: list of non-free packages to be installed. Example: - ["device-nokia-n900-nonfree-firmware"] - """ - # Read subpackages - apkbuild = pmb.parse.apkbuild(pmb.helpers.devices.find_path(args, device, - 'APKBUILD')) - subpackages = apkbuild["subpackages"] - - # Check for firmware and userland - ret = [] - prefix = "device-" + device + "-nonfree-" - if args.nonfree_firmware and prefix + "firmware" in subpackages: - ret += [prefix + "firmware"] - if args.nonfree_userland and prefix + "userland" in subpackages: - ret += [prefix + "userland"] - return ret - - -def get_kernel_package(args, device): - """ - Get the device's kernel subpackage based on the user's choice in - "pmbootstrap init". - - :param device: code name, e.g. "sony-amami" - :returns: [] or the package in a list, e.g. - ["device-sony-amami-kernel-mainline"] - """ - # Empty list: single kernel devices / "none" selected - kernels = pmb.parse._apkbuild.kernels(args, device) - if not kernels or args.kernel == "none": - return [] - - # Sanity check - if args.kernel not in kernels: - raise RuntimeError("Selected kernel (" + args.kernel + ") is not" - " valid for device " + device + ". Please" - " run 'pmbootstrap init' to select a valid kernel.") - - # Selected kernel subpackage - return ["device-" + device + "-kernel-" + args.kernel] - - -def copy_files_from_chroot(args, suffix): - """ - Copy all files from the rootfs chroot to /mnt/install, except - for the home folder (because /home will contain some empty - mountpoint folders). - - :param suffix: the chroot suffix, e.g. 
"rootfs_qemu-amd64" - """ - # Mount the device rootfs - logging.info(f"(native) copy {suffix} to /mnt/install/") - mountpoint = mount_device_rootfs(args, suffix) - mountpoint_outside = args.work + "/chroot_native" + mountpoint - - # Remove empty qemu-user binary stub (where the binary was bind-mounted) - arch_qemu = pmb.parse.arch.alpine_to_qemu(args.deviceinfo["arch"]) - qemu_binary = mountpoint_outside + "/usr/bin/qemu-" + arch_qemu + "-static" - if os.path.exists(qemu_binary): - pmb.helpers.run.root(args, ["rm", qemu_binary]) - - # Remove apk progress fifo - fifo = f"{args.work}/chroot_{suffix}/tmp/apk_progress_fifo" - if os.path.exists(fifo): - pmb.helpers.run.root(args, ["rm", fifo]) - - # Get all folders inside the device rootfs (except for home) - folders = [] - for path in glob.glob(mountpoint_outside + "/*"): - if path.endswith("/home"): - continue - folders += [os.path.basename(path)] - - # Update or copy all files - if args.rsync: - pmb.chroot.apk.install(args, ["rsync"]) - rsync_flags = "-a" - if args.verbose: - rsync_flags += "vP" - pmb.chroot.root(args, ["rsync", rsync_flags, "--delete"] + folders + - ["/mnt/install/"], working_dir=mountpoint) - pmb.chroot.root(args, ["rm", "-rf", "/mnt/install/home"]) - else: - pmb.chroot.root(args, ["cp", "-a"] + folders + ["/mnt/install/"], - working_dir=mountpoint) - - -def create_home_from_skel(args): - """ - Create /home/{user} from /etc/skel - """ - rootfs = args.work + "/chroot_native/mnt/install" - if args.filesystem == "btrfs": - pmb.helpers.run.root(args, - ["btrfs", "subvol", "create", rootfs + "/home"]) - else: - pmb.helpers.run.root(args, ["mkdir", rootfs + "/home"]) - homedir = rootfs + "/home/" + args.user - if os.path.exists(f"{rootfs}/etc/skel"): - pmb.helpers.run.root(args, ["cp", "-a", f"{rootfs}/etc/skel", homedir]) - else: - pmb.helpers.run.root(args, ["mkdir", homedir]) - pmb.helpers.run.root(args, ["chown", "-R", "10000", homedir]) - - -def configure_apk(args): - """ - Copy over all official keys, and the keys used to compile local packages - (unless --no-local-pkgs is set). Then copy the corresponding APKINDEX files - and remove the /mnt/pmbootstrap/packages repository. - """ - # Official keys - pattern = f"{pmb.config.apk_keys_path}/*.pub" - - # Official keys + local keys - if args.install_local_pkgs: - pattern = f"{args.work}/config_apk_keys/*.pub" - - # Copy over keys - rootfs = args.work + "/chroot_native/mnt/install" - for key in glob.glob(pattern): - pmb.helpers.run.root(args, ["cp", key, rootfs + "/etc/apk/keys/"]) - - # Copy over the corresponding APKINDEX files from cache - index_files = pmb.helpers.repo.apkindex_files(args, - arch=args.deviceinfo["arch"], - user_repository=False) - for f in index_files: - pmb.helpers.run.root(args, ["cp", f, rootfs + "/var/cache/apk/"]) - - # Disable pmbootstrap repository - pmb.helpers.run.root(args, ["sed", "-i", r"/\/mnt\/pmbootstrap\/packages/d", - rootfs + "/etc/apk/repositories"]) - pmb.helpers.run.user(args, ["cat", rootfs + "/etc/apk/repositories"]) - - -def set_user(args): - """ - Create user with UID 10000 if it doesn't exist. - Usually the ID for the first user created is 1000, but higher ID is - chosen here to not cause issues with existing installations. Historically, - this was done to avoid conflict with Android UIDs/GIDs, but pmOS has since - dropped support for hybris/Halium. 
- """ - suffix = "rootfs_" + args.device - if not pmb.chroot.user_exists(args, args.user, suffix): - pmb.chroot.root(args, ["adduser", "-D", "-u", "10000", args.user], - suffix) - - pmaports_cfg = pmb.config.pmaports.read_config(args) - groups = [] - groups += pmaports_cfg.get("install_user_groups", - "audio,input,netdev,plugdev,video,wheel").split(",") - groups += pmb.install.ui.get_groups(args) - - for group in groups: - pmb.chroot.root(args, ["addgroup", "-S", group], suffix, - check=False) - pmb.chroot.root(args, ["addgroup", args.user, group], suffix) - - -def setup_login_chpasswd_user_from_arg(args, suffix): - """ - Set the user's password from what the user passed as --password. Make an - effort to not have the password end up in the log file by writing it to - a temp file, instead of "echo user:$pass | chpasswd". The user should of - course only use this with a test password anyway, but let's be nice and try - to have the user protected from accidentally posting their password in - any case. - - :param suffix: of the chroot, where passwd will be execute (either the - f"rootfs_{args.device}", or f"installer_{args.device}") - """ - path = "/tmp/pmbootstrap_chpasswd_in" - path_outside = f"{args.work}/chroot_{suffix}{path}" - - with open(path_outside, "w", encoding="utf-8") as handle: - handle.write(f"{args.user}:{args.password}") - - pmb.chroot.root(args, ["sh", "-c", f"cat {shlex.quote(path)} | chpasswd"], - suffix) - - os.unlink(path_outside) - - -def is_root_locked(args, suffix): - """ - Figure out from /etc/shadow if root is already locked. The output of this - is stored in the log, so use grep to only log the line for root, not the - line for the user which contains a hash of the user's password. - - :param suffix: either rootfs_{args.device} or installer_{args.device} - """ - shadow_root = pmb.chroot.root(args, ["grep", "^root:!:", "/etc/shadow"], - suffix, output_return=True, check=False) - return shadow_root.startswith("root:!:") - - -def setup_login(args, suffix): - """ - Loop until the password for user has been set successfully, and disable - root login. - - :param suffix: of the chroot, where passwd will be execute (either the - f"rootfs_{args.device}", or f"installer_{args.device}") - """ - if not args.on_device_installer: - # User password - logging.info(f" *** SET LOGIN PASSWORD FOR: '{args.user}' ***") - if args.password: - setup_login_chpasswd_user_from_arg(args, suffix) - else: - while True: - try: - pmb.chroot.root(args, ["passwd", args.user], suffix, - output="interactive") - break - except RuntimeError: - logging.info("WARNING: Failed to set the password. Try it" - " one more time.") - - # Disable root login - if is_root_locked(args, suffix): - logging.debug(f"({suffix}) root is already locked") - else: - logging.debug(f"({suffix}) locking root") - pmb.chroot.root(args, ["passwd", "-l", "root"], suffix) - - -def copy_ssh_keys(args): - """ - If requested, copy user's SSH public keys to the device if they exist - """ - if not args.ssh_keys: - return - keys = [] - for key in glob.glob(os.path.expanduser(args.ssh_key_glob)): - with open(key, "r") as infile: - keys += infile.readlines() - - if not len(keys): - logging.info("NOTE: Public SSH keys not found. 
Since no SSH keys " - "were copied, you will need to use SSH password " - "authentication!") - return - - authorized_keys = args.work + "/chroot_native/tmp/authorized_keys" - outfile = open(authorized_keys, "w") - for key in keys: - outfile.write("%s" % key) - outfile.close() - - target = f"{args.work}/chroot_native/mnt/install/home/{args.user}/.ssh" - pmb.helpers.run.root(args, ["mkdir", target]) - pmb.helpers.run.root(args, ["chmod", "700", target]) - pmb.helpers.run.root(args, ["cp", authorized_keys, target + - "/authorized_keys"]) - pmb.helpers.run.root(args, ["rm", authorized_keys]) - pmb.helpers.run.root(args, ["chown", "-R", "10000:10000", target]) - - -def setup_keymap(args): - """ - Set the keymap with the setup-keymap utility if the device requires it - """ - suffix = "rootfs_" + args.device - info = pmb.parse.deviceinfo(args, device=args.device) - if "keymaps" not in info or info["keymaps"].strip() == "": - logging.info("NOTE: No valid keymap specified for device") - return - options = info["keymaps"].split(' ') - if (args.keymap != "" and - args.keymap is not None and - args.keymap in options): - layout, variant = args.keymap.split("/") - pmb.chroot.root(args, ["setup-keymap", layout, variant], suffix, - output="interactive") - - # Check xorg config - config = None - if os.path.exists(f"{args.work}/chroot_{suffix}/etc/X11/xorg.conf.d"): - config = pmb.chroot.root(args, ["grep", "-rl", "XkbLayout", - "/etc/X11/xorg.conf.d/"], - suffix, check=False, output_return=True) - if config: - # Nokia n900 (RX-51) randomly merges some keymaps so we - # have to specify a composite keymap for a few countries. See: - # https://gitlab.freedesktop.org/xkeyboard-config/xkeyboard-config/-/blob/master/symbols/nokia_vndr/rx-51 - if variant == "rx51_fi" or variant == "rx51_se": - layout = "fise" - if variant == "rx51_da" or variant == "rx51_no": - layout = "dano" - if variant == "rx51_pt" or variant == "rx51_es": - layout = "ptes" - # Multiple files can contain the keyboard layout, take last - config = config.splitlines()[-1] - old_text = "Option *\\\"XkbLayout\\\" *\\\".*\\\"" - new_text = "Option \\\"XkbLayout\\\" \\\"" + layout + "\\\"" - pmb.chroot.root(args, ["sed", "-i", "s/" + old_text + "/" + - new_text + "/", config], suffix) - else: - logging.info("NOTE: No valid keymap specified for device") - - -def setup_timezone(args): - suffix = f"rootfs_{args.device}" - - arch = args.deviceinfo["arch"] - alpine_conf = pmb.helpers.package.get(args, "alpine-conf", arch) - version = alpine_conf["version"].split("-r")[0] - - setup_tz_cmd = ["setup-timezone"] - # setup-timezone will, by default, copy the timezone to /etc/zoneinfo - # and disregard tzdata, to save space. If we actually have tzdata - # installed, make sure that setup-timezone makes use of it, since - # there's no space to be saved. 
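# Illustrative sketch (not from the original file): setup_keymap() above
# special-cases the Nokia N900 (RX-51) xkb variants, where several country
# variants share one composite layout, before rewriting the XkbLayout option
# in xorg.conf.d. The variant-to-layout fixup as a plain lookup:
RX51_COMPOSITE_LAYOUTS = {
    "rx51_fi": "fise", "rx51_se": "fise",
    "rx51_da": "dano", "rx51_no": "dano",
    "rx51_pt": "ptes", "rx51_es": "ptes",
}

def effective_xkb_layout(layout, variant):
    return RX51_COMPOSITE_LAYOUTS.get(variant, layout)

assert effective_xkb_layout("fi", "rx51_fi") == "fise"
assert effective_xkb_layout("us", "basic") == "us"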
- if "tzdata" in pmb.chroot.apk.installed(args, suffix): - setup_tz_cmd += ["-i"] - if not pmb.parse.version.check_string(version, ">=3.14.0"): - setup_tz_cmd += ["-z"] - setup_tz_cmd += [args.timezone] - pmb.chroot.root(args, setup_tz_cmd, suffix) - - -def setup_hostname(args): - """ - Set the hostname and update localhost address in /etc/hosts - """ - # Default to device name - hostname = args.hostname - if not hostname: - hostname = args.device - - if not pmb.helpers.other.validate_hostname(hostname): - raise RuntimeError("Hostname '" + hostname + "' is not valid, please" - " run 'pmbootstrap init' to configure it.") - - suffix = "rootfs_" + args.device - # Generate /etc/hostname - pmb.chroot.root(args, ["sh", "-c", "echo " + shlex.quote(hostname) + - " > /etc/hostname"], suffix) - # Update /etc/hosts - regex = (r"s/^127\.0\.0\.1.*/127.0.0.1\t" + re.escape(hostname) + - " localhost.localdomain localhost/") - pmb.chroot.root(args, ["sed", "-i", "-e", regex, "/etc/hosts"], suffix) - - -def setup_appstream(args): - """ - If alpine-appstream-downloader has been downloaded, execute it to have - update AppStream data on new installs - """ - suffix = "rootfs_" + args.device - installed_pkgs = pmb.chroot.apk.installed(args, suffix) - - if "alpine-appstream-downloader" not in installed_pkgs or args.offline: - return - - if not pmb.chroot.root(args, ["alpine-appstream-downloader", - "/mnt/appstream-data"], suffix, check=False): - pmb.chroot.root(args, ["mkdir", "-p", "/var/lib/swcatalog"], suffix) - pmb.chroot.root(args, ["cp", "-r", "/mnt/appstream-data/icons", - "/mnt/appstream-data/xml", - "-t", "/var/lib/swcatalog"], suffix) - - -def disable_sshd(args): - if not args.no_sshd: - return - - # check=False: rc-update doesn't exit with 0 if already disabled - suffix = f"rootfs_{args.device}" - pmb.chroot.root(args, ["rc-update", "del", "sshd", "default"], suffix, - check=False) - - # Verify that it's gone - sshd_files = pmb.helpers.run.root( - args, ["find", "-name", "sshd"], output_return=True, - working_dir=f"{args.work}/chroot_{suffix}/etc/runlevels") - if sshd_files: - raise RuntimeError(f"Failed to disable sshd service: {sshd_files}") - - -def print_sshd_info(args): - logging.info("") # make the note stand out - logging.info("*** SSH DAEMON INFORMATION ***") - - if not args.ondev_no_rootfs: - if args.no_sshd: - logging.info("SSH daemon is disabled (--no-sshd).") - else: - logging.info("SSH daemon is enabled (disable with --no-sshd).") - logging.info(f"Login as '{args.user}' with the password given" - " during installation.") - - if args.on_device_installer: - # We don't disable sshd in the installer OS. If the device is reachable - # on the network by default (e.g. Raspberry Pi), one can lock down the - # installer OS down by disabling the debug user (see wiki page). 
- logging.info("SSH daemon is enabled in the installer OS, to allow" - " debugging the installer image.") - logging.info("More info: https://postmarketos.org/ondev-debug") - - -def disable_firewall(args): - if not args.no_firewall: - return - - # check=False: rc-update doesn't exit with 0 if already disabled - suffix = f"rootfs_{args.device}" - pmb.chroot.root(args, ["rc-update", "del", "nftables", "default"], suffix, - check=False) - - # Verify that it's gone - nftables_files = pmb.helpers.run.root( - args, ["find", "-name", "nftables"], output_return=True, - working_dir=f"{args.work}/chroot_{suffix}/etc/runlevels") - if nftables_files: - raise RuntimeError(f"Failed to disable firewall: {nftables_files}") - - -def print_firewall_info(args): - pmaports_cfg = pmb.config.pmaports.read_config(args) - pmaports_ok = pmaports_cfg.get("supported_firewall", None) == "nftables" - - # Find kernel pmaport (will not be found if Alpine kernel is used) - apkbuild_found = False - apkbuild_has_opt = False - - arch = args.deviceinfo["arch"] - kernel = get_kernel_package(args, args.device) - if kernel: - kernel_apkbuild = pmb.build._package.get_apkbuild(args, kernel[0], - arch) - if kernel_apkbuild: - opts = kernel_apkbuild["options"] - apkbuild_has_opt = "pmb:kconfigcheck-nftables" in opts - apkbuild_found = True - - # Print the note and make it stand out - logging.info("") - logging.info("*** FIREWALL INFORMATION ***") - - if not pmaports_ok: - logging.info("Firewall is not supported in checked out pmaports" - " branch.") - elif args.no_firewall: - logging.info("Firewall is disabled (--no-firewall).") - elif not apkbuild_found: - logging.info("Firewall is enabled, but may not work (couldn't" - " determine if kernel supports nftables).") - elif apkbuild_has_opt: - logging.info("Firewall is enabled and supported by kernel.") - else: - logging.info("Firewall is enabled, but will not work (no support in" - " kernel config for nftables).") - logging.info("If/when the kernel supports it in the future, it" - " will work automatically.") - - logging.info("For more information: https://postmarketos.org/firewall") - - -def generate_binary_list(args, suffix, step): - """ - Perform three checks prior to writing binaries to disk: 1) that binaries - exist, 2) that binaries do not extend into the first partition, 3) that - binaries do not overlap each other. 
- - :param suffix: of the chroot, which holds the firmware files (either the - f"rootfs_{args.device}", or f"installer_{args.device}") - :param step: partition step size in bytes - """ - binary_ranges = {} - binary_list = [] - binaries = args.deviceinfo["sd_embed_firmware"].split(",") - - for binary_offset in binaries: - binary, offset = binary_offset.split(':') - try: - offset = int(offset) - except ValueError: - raise RuntimeError("Value for firmware binary offset is " - f"not valid: {offset}") - binary_path = os.path.join(args.work, f"chroot_{suffix}", "usr/share", - binary) - if not os.path.exists(binary_path): - raise RuntimeError("The following firmware binary does not " - f"exist in the {suffix} chroot: " - f"/usr/share/{binary}") - # Insure that embedding the firmware will not overrun the - # first partition - boot_part_start = args.deviceinfo["boot_part_start"] or "2048" - max_size = (int(boot_part_start) * 512) - (offset * step) - binary_size = os.path.getsize(binary_path) - if binary_size > max_size: - raise RuntimeError("The firmware is too big to embed in the " - f"disk image {binary_size}B > {max_size}B") - # Insure that the firmware does not conflict with any other firmware - # that will be embedded - binary_start = offset * step - binary_end = binary_start + binary_size - for start, end in binary_ranges.items(): - if ((binary_start >= start and binary_start < end) or - (binary_end > start and binary_end <= end)): - raise RuntimeError("The firmware overlaps with at least one " - f"other firmware image: {binary}") - - binary_ranges[binary_start] = binary_end - binary_list.append((binary, offset)) - - return binary_list - - -def embed_firmware(args, suffix): - """ - This method will embed firmware, located at /usr/share, that are specified - by the "sd_embed_firmware" deviceinfo parameter into the SD card image - (e.g. u-boot). Binaries that would overwrite the first partition are not - accepted, and if multiple binaries are specified then they will be checked - for collisions with each other. - - :param suffix: of the chroot, which holds the firmware files (either the - f"rootfs_{args.device}", or f"installer_{args.device}") - """ - if not args.deviceinfo["sd_embed_firmware"]: - return - - step = 1024 - if args.deviceinfo["sd_embed_firmware_step_size"]: - try: - step = int(args.deviceinfo["sd_embed_firmware_step_size"]) - except ValueError: - raise RuntimeError("Value for " - "deviceinfo_sd_embed_firmware_step_size " - "is not valid: {}".format(step)) - - device_rootfs = mount_device_rootfs(args, suffix) - binary_list = generate_binary_list(args, suffix, step) - - # Write binaries to disk - for binary, offset in binary_list: - binary_file = os.path.join("/usr/share", binary) - logging.info("Embed firmware {} in the SD card image at offset {} with" - " step size {}".format(binary, offset, step)) - filename = os.path.join(device_rootfs, binary_file.lstrip("/")) - pmb.chroot.root(args, ["dd", "if=" + filename, "of=/dev/install", - "bs=" + str(step), "seek=" + str(offset)]) - - -def write_cgpt_kpart(args, layout, suffix): - """ - Write the kernel to the ChromeOS kernel partition. 
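# Illustrative sketch (not from the original file): generate_binary_list()
# above rejects firmware that would run into the first partition. With
# boot_part_start given in 512-byte sectors and the binary written at
# offset * step bytes, the space available to it is
#     max_size = boot_part_start * 512 - offset * step
# e.g. boot_part_start=2048, step=1024, offset=8:
#     2048 * 512 - 8 * 1024 = 1048576 - 8192 = 1040384 bytes
# A generic interval test for the "binaries must not overlap" requirement
# (slightly more general than the two range comparisons above):
def ranges_overlap(start_a, end_a, start_b, end_b):
    # True if [start_a, end_a) and [start_b, end_b) intersect
    return start_a < end_b and start_b < end_a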
- - :param layout: partition layout from get_partition_layout() - :param suffix: of the chroot, which holds the image file to be flashed - """ - if not args.deviceinfo["cgpt_kpart"] or not args.install_cgpt: - return - - device_rootfs = mount_device_rootfs(args, suffix) - filename = f"{device_rootfs}{args.deviceinfo['cgpt_kpart']}" - pmb.chroot.root( - args, ["dd", f"if={filename}", f"of=/dev/installp{layout['kernel']}"]) - - -def sanity_check_boot_size(args): - default = pmb.config.defaults["boot_size"] - if int(args.boot_size) >= int(default): - return - logging.error("ERROR: your pmbootstrap has a small boot_size of" - f" {args.boot_size} configured, probably because the config" - " has been created with an old version.") - logging.error("This can lead to problems later on, we recommend setting it" - f" to {default} MiB.") - logging.error(f"Run 'pmbootstrap config boot_size {default}' and try again.") - sys.exit(1) - - -def sanity_check_disk(args): - device = args.disk - device_name = os.path.basename(device) - if not os.path.exists(device): - raise RuntimeError(f"{device} doesn't exist, is the disk plugged?") - if os.path.isdir('/sys/class/block/{}'.format(device_name)): - with open('/sys/class/block/{}/ro'.format(device_name), 'r') as handle: - ro = handle.read() - if ro == '1\n': - raise RuntimeError(f"{device} is read-only, maybe a locked SD card?") - - -def sanity_check_disk_size(args): - device = args.disk - devpath = os.path.realpath(device) - sysfs = '/sys/class/block/{}/size'.format(devpath.replace('/dev/', '')) - if not os.path.isfile(sysfs): - # This is a best-effort sanity check, continue if it's not checkable - return - - with open(sysfs) as handle: - raw = handle.read() - - # Size is in 512-byte blocks - size = int(raw.strip()) - human = "{:.2f} GiB".format(size / 2 / 1024 / 1024) - - # Warn if the size is larger than 100GiB - if size > (100 * 2 * 1024 * 1024): - if not pmb.helpers.cli.confirm(args, - f"WARNING: The target disk ({devpath}) " - "is larger than a usual SD card " - "(>100GiB). Are you sure you want to " - f"overwrite this {human} disk?", - no_assumptions=True): - raise RuntimeError("Aborted.") - - -def get_ondev_pkgver(args): - arch = args.deviceinfo["arch"] - package = pmb.helpers.package.get(args, "postmarketos-ondev", arch) - return package["version"].split("-r")[0] - - -def sanity_check_ondev_version(args): - ver_pkg = get_ondev_pkgver(args) - ver_min = pmb.config.ondev_min_version - if pmb.parse.version.compare(ver_pkg, ver_min) == -1: - raise RuntimeError("This version of pmbootstrap requires" - f" postmarketos-ondev version {ver_min} or" - " higher. The postmarketos-ondev found in pmaports" - f" / in the binary packages has version {ver_pkg}.") - - -def get_partition_layout(reserve, kernel): - """ - :param reserve: create an empty partition between root and boot (pma#463) - :param kernel: create a separate kernel partition before all other - partitions, e.g. for the ChromeOS devices with cgpt - :returns: the partition layout, e.g. 
without reserve and kernel: - {"kernel": None, "boot": 1, "reserve": None, "root": 2} - """ - ret = {} - ret["kernel"] = None - ret["boot"] = 1 - ret["reserve"] = None - ret["root"] = 2 - - if kernel: - ret["kernel"] = 1 - ret["boot"] += 1 - ret["root"] += 1 - - if reserve: - ret["reserve"] = ret["root"] - ret["root"] += 1 - return ret - - -def get_uuid(args, partition): - """ - Get UUID of a partition - - :param partition: block device for getting UUID from - """ - return pmb.chroot.root( - args, - [ - "blkid", - "-s", "UUID", - "-o", "value", - partition, - ], - output_return=True - ).rstrip() - - -def create_crypttab(args, layout, suffix): - """ - Create /etc/crypttab config - - :param layout: partition layout from get_partition_layout() - :param suffix: of the chroot, which crypttab will be created to - """ - - luks_uuid = get_uuid(args, f"/dev/installp{layout['root']}") - - crypttab = f"root UUID={luks_uuid} none luks\n" - - open(f"{args.work}/chroot_{suffix}/tmp/crypttab", "w").write(crypttab) - pmb.chroot.root(args, ["mv", "/tmp/crypttab", "/etc/crypttab"], suffix) - - -def create_fstab(args, layout, suffix): - """ - Create /etc/fstab config - - :param layout: partition layout from get_partition_layout() - :param suffix: of the chroot, which fstab will be created to - """ - - # Do not install fstab into target rootfs when using on-device - # installer. Provide fstab only to installer suffix - if args.on_device_installer and "rootfs_" in suffix: - return - - boot_dev = f"/dev/installp{layout['boot']}" - root_dev = f"/dev/installp{layout['root']}" - - boot_mount_point = f"UUID={get_uuid(args, boot_dev)}" - root_mount_point = "/dev/mapper/root" if args.full_disk_encryption \ - else f"UUID={get_uuid(args, root_dev)}" - - boot_filesystem = args.deviceinfo["boot_filesystem"] or "ext2" - root_filesystem = pmb.install.get_root_filesystem(args) - - if root_filesystem == "btrfs": - # btrfs gets separate subvolumes for root, var and home - fstab = f""" -# -{root_mount_point} / {root_filesystem} subvol=root,compress=zstd:2,ssd 0 0 -{root_mount_point} /home {root_filesystem} subvol=home,compress=zstd:2,ssd 0 0 -{root_mount_point} /var {root_filesystem} subvol=var,compress=zstd:2,ssd 0 0 - -{boot_mount_point} /boot {boot_filesystem} defaults 0 0 -""".lstrip() - - else: - fstab = f""" -# -{root_mount_point} / {root_filesystem} defaults 0 0 -{boot_mount_point} /boot {boot_filesystem} defaults 0 0 -""".lstrip() - - with open(f"{args.work}/chroot_{suffix}/tmp/fstab", "w") as f: - f.write(fstab) - pmb.chroot.root(args, ["mv", "/tmp/fstab", "/etc/fstab"], suffix) - - -def install_system_image(args, size_reserve, suffix, step, steps, - boot_label="pmOS_boot", root_label="pmOS_root", - split=False, disk=None): - """ - :param size_reserve: empty partition between root and boot in MiB (pma#463) - :param suffix: the chroot suffix, where the rootfs that will be installed - on the device has been created (e.g. "rootfs_qemu-amd64") - :param step: next installation step - :param steps: total installation steps - :param boot_label: label of the boot partition (e.g. "pmOS_boot") - :param root_label: label of the root partition (e.g. "pmOS_root") - :param split: create separate images for boot and root partitions - :param disk: path to disk block device (e.g. 
/dev/mmcblk0) or None - """ - # Partition and fill image file/disk block device - logging.info(f"*** ({step}/{steps}) PREPARE INSTALL BLOCKDEVICE ***") - pmb.chroot.shutdown(args, True) - (size_boot, size_root) = get_subpartitions_size(args, suffix) - layout = get_partition_layout(size_reserve, args.deviceinfo["cgpt_kpart"] \ - and args.install_cgpt) - if not args.rsync: - pmb.install.blockdevice.create(args, size_boot, size_root, - size_reserve, split, disk) - if not split: - if args.deviceinfo["cgpt_kpart"] and args.install_cgpt: - pmb.install.partition_cgpt( - args, layout, size_boot, size_reserve) - else: - pmb.install.partition(args, layout, size_boot, size_reserve) - if not split: - pmb.install.partitions_mount(args, layout, disk) - - pmb.install.format(args, layout, boot_label, root_label, disk) - - # Create /etc/fstab and /etc/crypttab - logging.info("(native) create /etc/fstab") - create_fstab(args, layout, suffix) - if args.full_disk_encryption: - logging.info("(native) create /etc/crypttab") - create_crypttab(args, layout, suffix) - - # Run mkinitfs to pass UUIDs to cmdline - logging.info(f"({suffix}) mkinitfs") - pmb.chroot.root(args, ["mkinitfs"], suffix) - - # Clean up after running mkinitfs in chroot - pmb.helpers.mount.umount_all(args, f"{args.work}/chroot_{suffix}") - pmb.helpers.run.root(args, ["rm", f"{args.work}/chroot_{suffix}/in-pmbootstrap"]) - pmb.chroot.remove_mnt_pmbootstrap(args, suffix) - - # Just copy all the files - logging.info(f"*** ({step + 1}/{steps}) FILL INSTALL BLOCKDEVICE ***") - copy_files_from_chroot(args, suffix) - create_home_from_skel(args) - configure_apk(args) - copy_ssh_keys(args) - - # Don't try to embed firmware and cgpt on split images since there's no - # place to put it and it will end up in /dev of the chroot instead - if not split: - embed_firmware(args, suffix) - write_cgpt_kpart(args, layout, suffix) - - if disk: - logging.info(f"Unmounting disk {disk} (this may take a while " - "to sync, please wait)") - pmb.chroot.shutdown(args, True) - - # Convert rootfs to sparse using img2simg - sparse = args.sparse - if sparse is None: - sparse = args.deviceinfo["flash_sparse"] == "true" - - if sparse and not split and not disk: - logging.info("(native) make sparse rootfs") - pmb.chroot.apk.install(args, ["android-tools"]) - sys_image = args.device + ".img" - sys_image_sparse = args.device + "-sparse.img" - pmb.chroot.user(args, ["img2simg", sys_image, sys_image_sparse], - working_dir="/home/pmos/rootfs/") - pmb.chroot.user(args, ["mv", "-f", sys_image_sparse, sys_image], - working_dir="/home/pmos/rootfs/") - - # patch sparse image for Samsung devices if specified - samsungify_strategy = args.deviceinfo["flash_sparse_samsung_format"] - if samsungify_strategy: - logging.info("(native) convert sparse image into Samsung's sparse image format") - pmb.chroot.apk.install(args, ["sm-sparse-image-tool"]) - sys_image = f"{args.device}.img" - sys_image_patched = f"{args.device}-patched.img" - pmb.chroot.user(args, ["sm_sparse_image_tool", "samsungify", "--strategy", - samsungify_strategy, sys_image, sys_image_patched], - working_dir="/home/pmos/rootfs/") - pmb.chroot.user(args, ["mv", "-f", sys_image_patched, sys_image], - working_dir="/home/pmos/rootfs/") - - -def print_flash_info(args): - """ Print flashing information, based on the deviceinfo data and the - pmbootstrap arguments. 
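# Illustrative sketch (not from the original file): install_system_image()
# above drives partitioning and formatting off the dict returned by
# get_partition_layout(). Partition numbers shift when a ChromeOS kernel
# partition and/or the empty reserve partition are requested; a condensed
# equivalent with the two extreme cases spelled out:
def layout_sketch(reserve, kernel):
    boot = 2 if kernel else 1
    root = boot + 2 if reserve else boot + 1
    return {"kernel": 1 if kernel else None, "boot": boot,
            "reserve": root - 1 if reserve else None, "root": root}

assert layout_sketch(reserve=False, kernel=False) == \
    {"kernel": None, "boot": 1, "reserve": None, "root": 2}
assert layout_sketch(reserve=True, kernel=True) == \
    {"kernel": 1, "boot": 2, "reserve": 3, "root": 4}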
""" - logging.info("") # make the note stand out - logging.info("*** FLASHING INFORMATION ***") - - # System flash information - method = args.deviceinfo["flash_method"] - flasher = pmb.config.flashers.get(method, {}) - flasher_actions = flasher.get("actions", {}) - requires_split = flasher.get("split", False) - - if method == "none": - logging.info("Refer to the installation instructions of your device," - " or the generic install instructions in the wiki.") - logging.info("https://wiki.postmarketos.org/wiki/Installation_guide" - "#pmbootstrap_flash") - return - - logging.info("Run the following to flash your installation to the" - " target device:") - - if "flash_rootfs" in flasher_actions and not args.disk and \ - bool(args.split) == requires_split: - logging.info("* pmbootstrap flasher flash_rootfs") - logging.info(" Flashes the generated rootfs image to your device:") - if args.split: - logging.info(f" {args.work}/chroot_native/home/pmos/rootfs/" - f"{args.device}-rootfs.img") - else: - logging.info(f" {args.work}/chroot_native/home/pmos/rootfs/" - f"{args.device}.img") - logging.info(" (NOTE: This file has a partition table, which" - " contains /boot and / subpartitions. That way we" - " don't need to change the partition layout on your" - " device.)") - - # if current flasher supports vbmeta and partition is explicitly specified - # in deviceinfo - if "flash_vbmeta" in flasher_actions and \ - (args.deviceinfo["flash_fastboot_partition_vbmeta"] or - args.deviceinfo["flash_heimdall_partition_vbmeta"]): - logging.info("* pmbootstrap flasher flash_vbmeta") - logging.info(" Flashes vbmeta image with verification disabled flag.") - - # if current flasher supports dtbo and partition is explicitly specified - # in deviceinfo - if "flash_dtbo" in flasher_actions and \ - (args.deviceinfo["flash_fastboot_partition_dtbo"] or - args.deviceinfo["flash_heimdall_partition_dtbo"]): - logging.info("* pmbootstrap flasher flash_dtbo") - logging.info(" Flashes dtbo image.") - - # Most flash methods operate independently of the boot partition. - # (e.g. an Android boot image is generated). In that case, "flash_kernel" - # works even when partitions are split or installing to disk. This is not - # possible if the flash method requires split partitions. - if "flash_kernel" in flasher_actions and \ - (not requires_split or args.split): - logging.info("* pmbootstrap flasher flash_kernel") - logging.info(" Flashes the kernel + initramfs to your device:") - if requires_split: - logging.info(f" {args.work}/chroot_native/home/pmos/rootfs/" - f"{args.device}-boot.img") - else: - logging.info(f" {args.work}/chroot_rootfs_{args.device}/boot") - - if "boot" in flasher_actions: - logging.info(" (NOTE: " + method + " also supports booting" - " the kernel/initramfs directly without flashing." - " Use 'pmbootstrap flasher boot' to do that.)") - - if "flash_lk2nd" in flasher_actions and \ - os.path.exists(args.work + "/chroot_rootfs_" + args.device + - "/boot/lk2nd.img"): - logging.info("* Your device supports and may even require" - " flashing lk2nd. You should flash it before" - " flashing anything else. 
Use 'pmbootstrap flasher" - " flash_lk2nd' to do that.") - - # Export information - logging.info("* If the above steps do not work, you can also create" - " symlinks to the generated files with 'pmbootstrap export'" - " and flash outside of pmbootstrap.") - - -def install_recovery_zip(args, steps): - logging.info(f"*** ({steps}/{steps}) CREATING RECOVERY-FLASHABLE ZIP ***") - suffix = "buildroot_" + args.deviceinfo["arch"] - mount_device_rootfs(args, f"rootfs_{args.device}", suffix) - pmb.install.recovery.create_zip(args, suffix) - - # Flash information - logging.info("*** FLASHING INFORMATION ***") - logging.info("Flashing with the recovery zip is explained here:") - logging.info("https://postmarketos.org/recoveryzip") - - -def install_on_device_installer(args, step, steps): - # Generate the rootfs image - if not args.ondev_no_rootfs: - suffix_rootfs = f"rootfs_{args.device}" - install_system_image(args, 0, suffix_rootfs, step=step, steps=steps, - split=True) - step += 2 - - # Prepare the installer chroot - logging.info(f"*** ({step}/{steps}) CREATE ON-DEVICE INSTALLER ROOTFS ***") - step += 1 - packages = ([f"device-{args.device}", - "postmarketos-ondev"] + - get_kernel_package(args, args.device) + - get_nonfree_packages(args, args.device)) - - suffix_installer = f"installer_{args.device}" - pmb.chroot.apk.install(args, packages, suffix_installer) - - # Move rootfs image into installer chroot - img_path_dest = f"{args.work}/chroot_{suffix_installer}/var/lib/rootfs.img" - if not args.ondev_no_rootfs: - img = f"{args.device}-root.img" - img_path_src = f"{args.work}/chroot_native/home/pmos/rootfs/{img}" - logging.info(f"({suffix_installer}) add {img} as /var/lib/rootfs.img") - pmb.install.losetup.umount(args, img_path_src) - pmb.helpers.run.root(args, ["mv", img_path_src, img_path_dest]) - - # Run ondev-prepare, so it may generate nice configs from the channel - # properties (e.g. to display the version number), or transform the image - # file into another format. This can all be done without pmbootstrap - # changes in the postmarketos-ondev package. 
- logging.info(f"({suffix_installer}) ondev-prepare") - channel = pmb.config.pmaports.read_config(args)["channel"] - channel_cfg = pmb.config.pmaports.read_config_channel(args) - env = {"ONDEV_CHANNEL": channel, - "ONDEV_CHANNEL_BRANCH_APORTS": channel_cfg["branch_aports"], - "ONDEV_CHANNEL_BRANCH_PMAPORTS": channel_cfg["branch_pmaports"], - "ONDEV_CHANNEL_DESCRIPTION": channel_cfg["description"], - "ONDEV_CHANNEL_MIRRORDIR_ALPINE": channel_cfg["mirrordir_alpine"], - "ONDEV_CIPHER": args.cipher, - "ONDEV_PMBOOTSTRAP_VERSION": pmb.__version__, - "ONDEV_UI": args.ui} - pmb.chroot.root(args, ["ondev-prepare"], suffix_installer, env=env) - - # Copy files specified with 'pmbootstrap install --ondev --cp' - if args.ondev_cp: - for host_src, chroot_dest in args.ondev_cp: - host_dest = f"{args.work}/chroot_{suffix_installer}/{chroot_dest}" - logging.info(f"({suffix_installer}) add {host_src} as" - f" {chroot_dest}") - pmb.helpers.run.root(args, ["install", "-Dm644", host_src, - host_dest]) - - # Remove $DEVICE-boot.img (we will generate a new one if --split was - # specified, otherwise the separate boot image is not needed) - if not args.ondev_no_rootfs: - img_boot = f"{args.device}-boot.img" - logging.info(f"(native) rm {img_boot}") - pmb.chroot.root(args, ["rm", f"/home/pmos/rootfs/{img_boot}"]) - - # Disable root login - setup_login(args, suffix_installer) - - # Generate installer image - size_reserve = round(os.path.getsize(img_path_dest) / 1024 / 1024) + 200 - pmaports_cfg = pmb.config.pmaports.read_config(args) - boot_label = pmaports_cfg.get("supported_install_boot_label", - "pmOS_inst_boot") - install_system_image(args, size_reserve, suffix_installer, step, steps, - boot_label, "pmOS_install", args.split, args.disk) - - -def get_selected_providers(args, packages): - """ - Look through the specified packages and see which providers were selected - in "pmbootstrap init". Install those as extra packages to select them - instead of the default provider. - - :param packages: the packages that have selectable providers (_pmb_select) - :return: additional provider packages to install - """ - providers = [] - for p in packages: - apkbuild = pmb.helpers.pmaports.get(args, p, subpackages=False) - for select in apkbuild['_pmb_select']: - if select in args.selected_providers: - providers.append(args.selected_providers[select]) - return providers - - -def get_recommends(args, packages): - """ - Look through the specified packages and collect additional packages - specified under _pmb_recommends in them. This is recursive, so it will dive - into packages that are listed under recommends to collect any packages they - might also have listed under their own _pmb_recommends. - - Recursion is only done into packages found in pmaports. - - If running with pmbootstrap install --no-recommends, this function returns - an empty list. - - :returns: list of pkgnames, e.g. ["chatty", "gnome-contacts"] - """ - ret = [] - if not args.install_recommends: - return ret - - for package in packages: - # Note that this ignores packages that don't exist. This means they - # aren't in pmaports. This is fine, with the assumption that - # installation will fail later in some other method if they truly don't - # exist in any repo. 
- apkbuild = pmb.helpers.pmaports.get(args, package, must_exist=False) - if not apkbuild: - continue - if package in apkbuild["subpackages"]: - # Just focus on the subpackage - apkbuild = apkbuild["subpackages"][package] - recommends = apkbuild["_pmb_recommends"] - if recommends: - logging.debug(f"{package}: install _pmb_recommends:" - f" {', '.join(recommends)}") - ret += recommends - # Call recursively in case recommends have pmb_recommends of their - # own. - ret += get_recommends(args, recommends) - - return ret - - -def create_device_rootfs(args, step, steps): - # List all packages to be installed (including the ones specified by --add) - # and upgrade the installed packages/apkindexes - logging.info(f'*** ({step}/{steps}) CREATE DEVICE ROOTFS ("{args.device}")' - ' ***') - - suffix = f"rootfs_{args.device}" - # Create user before installing packages, so post-install scripts of - # pmaports can figure out the username (legacy reasons: pmaports#820) - set_user(args) - - # Fill install_packages - install_packages = (pmb.config.install_device_packages + - ["device-" + args.device]) - if not args.install_base: - install_packages = [p for p in install_packages - if p != "postmarketos-base"] - if args.ui.lower() != "none": - install_packages += ["postmarketos-ui-" + args.ui] - - # Add additional providers of base/device/UI package - install_packages += get_selected_providers(args, install_packages) - - install_packages += get_kernel_package(args, args.device) - install_packages += get_nonfree_packages(args, args.device) - if args.ui.lower() != "none": - if args.ui_extras: - install_packages += ["postmarketos-ui-" + args.ui + "-extras"] - if args.extra_packages.lower() != "none": - install_packages += args.extra_packages.split(",") - if args.add: - install_packages += args.add.split(",") - locale_is_set = (args.locale != pmb.config.defaults["locale"]) - if locale_is_set: - install_packages += ["lang", "musl-locales"] - - pmaports_cfg = pmb.config.pmaports.read_config(args) - # postmarketos-base supports a dummy package for blocking osk-sdl install - # when not required - if pmaports_cfg.get("supported_base_nofde", None): - # The ondev installer *could* enable fde at runtime, so include it - # explicitly in the rootfs until there's a mechanism to selectively - # install it when the ondev installer is running. - # Always install it when --fde is specified. - if args.full_disk_encryption or args.on_device_installer: - # Pick the most suitable unlocker depending on the packages - # selected for installation - unlocker = pmb.parse.depends.package_provider( - args, "postmarketos-fde-unlocker", install_packages, suffix) - if unlocker["pkgname"] not in install_packages: - install_packages += [unlocker["pkgname"]] - else: - install_packages += ["postmarketos-base-nofde"] - - pmb.helpers.repo.update(args, args.deviceinfo["arch"]) - - # Install uninstallable "dependencies" by default - install_packages += get_recommends(args, install_packages) - - # Explicitly call build on the install packages, to re-build them or any - # dependency, in case the version increased - if args.build_pkgs_on_install: - for pkgname in install_packages: - pmb.build.package(args, pkgname, args.deviceinfo["arch"]) - - # Install all packages to device rootfs chroot (and rebuild the initramfs, - # because that doesn't always happen automatically yet, e.g. 
when the user - # installed a hook without pmbootstrap - see #69 for more info) - pmb.chroot.apk.install(args, install_packages, suffix) - flavor = pmb.chroot.other.kernel_flavor_installed(args, suffix) - pmb.chroot.initfs.build(args, flavor, suffix) - - # Set the user password - setup_login(args, suffix) - - # Set the keymap if the device requires it - setup_keymap(args) - - # Set timezone - setup_timezone(args) - - # Set locale - if locale_is_set: - # 10locale-pmos.sh gets sourced before 20locale.sh from - # alpine-baselayout by /etc/profile. Since they don't override the - # locale if it exists, it warranties we have preference - line = f"export LANG=${{LANG:-{shlex.quote(args.locale)}}}" - pmb.chroot.root(args, ["sh", "-c", f"echo {shlex.quote(line)}" - " > /etc/profile.d/10locale-pmos.sh"], suffix) - - # Set the hostname as the device name - setup_hostname(args) - - setup_appstream(args) - - disable_sshd(args) - disable_firewall(args) - - -def install(args): - # Sanity checks - sanity_check_boot_size(args) - if not args.android_recovery_zip and args.disk: - sanity_check_disk(args) - sanity_check_disk_size(args) - if args.on_device_installer: - sanity_check_ondev_version(args) - - # Number of steps for the different installation methods. - if args.no_image: - steps = 2 - elif args.android_recovery_zip: - steps = 3 - elif args.on_device_installer: - steps = 4 if args.ondev_no_rootfs else 7 - else: - steps = 4 - - if args.zap: - pmb.chroot.zap(args, False) - - # Install required programs in native chroot - step = 1 - logging.info(f"*** ({step}/{steps}) PREPARE NATIVE CHROOT ***") - pmb.chroot.apk.install(args, pmb.config.install_native_packages, - build=False) - step += 1 - - if not args.ondev_no_rootfs: - create_device_rootfs(args, step, steps) - step += 1 - - if args.no_image: - return - elif args.android_recovery_zip: - return install_recovery_zip(args, steps) - - if args.on_device_installer: - # Runs install_system_image twice - install_on_device_installer(args, step, steps) - else: - install_system_image(args, 0, f"rootfs_{args.device}", step, steps, - split=args.split, disk=args.disk) - - print_flash_info(args) - print_sshd_info(args) - print_firewall_info(args) - - # Leave space before 'chroot still active' note - logging.info("") diff --git a/pmb/install/blockdevice.py b/pmb/install/blockdevice.py deleted file mode 100644 index 4ca8afe5..00000000 --- a/pmb/install/blockdevice.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import glob -import pmb.helpers.mount -import pmb.install.losetup -import pmb.helpers.cli -import pmb.config - - -def previous_install(args, path): - """ - Search the disk for possible existence of a previous installation of - pmOS. We temporarily mount the possible pmOS_boot partition as - /dev/diskp1 inside the native chroot to check the label from there. - :param path: path to disk block device (e.g. 
/dev/mmcblk0) - """ - label = "" - for blockdevice_outside in [f"{path}1", f"{path}p1"]: - if not os.path.exists(blockdevice_outside): - continue - blockdevice_inside = "/dev/diskp1" - pmb.helpers.mount.bind_file(args, blockdevice_outside, - args.work + '/chroot_native' + - blockdevice_inside) - try: - label = pmb.chroot.root(args, ["blkid", "-s", "LABEL", - "-o", "value", - blockdevice_inside], - output_return=True) - except RuntimeError: - logging.info("WARNING: Could not get block device label," - " assume no previous installation on that partition") - - pmb.helpers.run.root(args, ["umount", args.work + "/chroot_native" + - blockdevice_inside]) - return "pmOS_boot" in label - - -def mount_disk(args, path): - """ - :param path: path to disk block device (e.g. /dev/mmcblk0) - """ - # Sanity checks - if not os.path.exists(path): - raise RuntimeError(f"The disk block device does not exist: {path}") - for path_mount in glob.glob(f"{path}*"): - if pmb.helpers.mount.ismount(path_mount): - raise RuntimeError(f"{path_mount} is mounted! Will not attempt to" - " format this!") - logging.info(f"(native) mount /dev/install (host: {path})") - pmb.helpers.mount.bind_file(args, path, - args.work + "/chroot_native/dev/install") - if previous_install(args, path): - if not pmb.helpers.cli.confirm(args, "WARNING: This device has a" - " previous installation of pmOS." - " CONTINUE?"): - raise RuntimeError("Aborted.") - else: - if not pmb.helpers.cli.confirm(args, f"EVERYTHING ON {path} WILL BE" - " ERASED! CONTINUE?"): - raise RuntimeError("Aborted.") - - -def create_and_mount_image(args, size_boot, size_root, size_reserve, - split=False): - """ - Create a new image file, and mount it as /dev/install. - - :param size_boot: size of the boot partition in MiB - :param size_root: size of the root partition in MiB - :param size_reserve: empty partition between root and boot in MiB (pma#463) - :param split: create separate images for boot and root partitions - """ - - # Short variables for paths - chroot = args.work + "/chroot_native" - img_path_prefix = "/home/pmos/rootfs/" + args.device - img_path_full = img_path_prefix + ".img" - img_path_boot = img_path_prefix + "-boot.img" - img_path_root = img_path_prefix + "-root.img" - - # Umount and delete existing images - for img_path in [img_path_full, img_path_boot, img_path_root]: - outside = chroot + img_path - if os.path.exists(outside): - pmb.helpers.mount.umount_all(args, chroot + "/mnt") - pmb.install.losetup.umount(args, img_path) - pmb.chroot.root(args, ["rm", img_path]) - - # Make sure there is enough free space - size_mb = round(size_boot + size_reserve + size_root) - disk_data = os.statvfs(args.work) - free = round((disk_data.f_bsize * disk_data.f_bavail) / (1024**2)) - if size_mb > free: - raise RuntimeError("Not enough free space to create rootfs image! 
" - f"(free: {free}M, required: {size_mb}M)") - - # Create empty image files - pmb.chroot.user(args, ["mkdir", "-p", "/home/pmos/rootfs"]) - size_mb_full = str(size_mb) + "M" - size_mb_boot = str(round(size_boot)) + "M" - size_mb_root = str(round(size_root)) + "M" - images = {img_path_full: size_mb_full} - if split: - images = {img_path_boot: size_mb_boot, - img_path_root: size_mb_root} - for img_path, size_mb in images.items(): - logging.info(f"(native) create {os.path.basename(img_path)} " - f"({size_mb})") - pmb.chroot.root(args, ["truncate", "-s", size_mb, img_path]) - - # Mount to /dev/install - mount_image_paths = {img_path_full: "/dev/install"} - if split: - mount_image_paths = {img_path_boot: "/dev/installp1", - img_path_root: "/dev/installp2"} - - for img_path, mount_point in mount_image_paths.items(): - logging.info("(native) mount " + mount_point + - " (" + os.path.basename(img_path) + ")") - pmb.install.losetup.mount(args, img_path) - device = pmb.install.losetup.device_by_back_file(args, img_path) - pmb.helpers.mount.bind_file(args, device, - args.work + "/chroot_native" + mount_point) - - -def create(args, size_boot, size_root, size_reserve, split, disk): - """ - Create /dev/install (the "install blockdevice"). - - :param size_boot: size of the boot partition in MiB - :param size_root: size of the root partition in MiB - :param size_reserve: empty partition between root and boot in MiB (pma#463) - :param split: create separate images for boot and root partitions - :param disk: path to disk block device (e.g. /dev/mmcblk0) or None - """ - pmb.helpers.mount.umount_all( - args, args.work + "/chroot_native/dev/install") - if disk: - mount_disk(args, disk) - else: - create_and_mount_image(args, size_boot, size_root, size_reserve, - split) diff --git a/pmb/install/format.py b/pmb/install/format.py deleted file mode 100644 index b1f7b485..00000000 --- a/pmb/install/format.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging -import pmb.chroot - - -def install_fsprogs(args, filesystem): - """ Install the package required to format a specific filesystem. """ - fsprogs = pmb.config.filesystems.get(filesystem) - if not fsprogs: - raise RuntimeError(f"Unsupported filesystem: {filesystem}") - pmb.chroot.apk.install(args, [fsprogs]) - - -def format_and_mount_boot(args, device, boot_label): - """ - :param device: boot partition on install block device (e.g. /dev/installp1) - :param boot_label: label of the root partition (e.g. "pmOS_boot") - - When adjusting this function, make sure to also adjust - ondev-prepare-internal-storage.sh in postmarketos-ondev.git! 
- """ - mountpoint = "/mnt/install/boot" - filesystem = args.deviceinfo["boot_filesystem"] or "ext2" - install_fsprogs(args, filesystem) - logging.info(f"(native) format {device} (boot, {filesystem}), mount to" - f" {mountpoint}") - if filesystem == "fat16": - pmb.chroot.root(args, ["mkfs.fat", "-F", "16", "-n", boot_label, - device]) - elif filesystem == "fat32": - pmb.chroot.root(args, ["mkfs.fat", "-F", "32", "-n", boot_label, - device]) - elif filesystem == "ext2": - pmb.chroot.root(args, ["mkfs.ext2", "-F", "-q", "-L", boot_label, - device]) - elif filesystem == "btrfs": - pmb.chroot.root(args, ["mkfs.btrfs", "-f", "-q", "-L", boot_label, - device]) - else: - raise RuntimeError("Filesystem " + filesystem + " is not supported!") - pmb.chroot.root(args, ["mkdir", "-p", mountpoint]) - pmb.chroot.root(args, ["mount", device, mountpoint]) - - -def format_luks_root(args, device): - """ - :param device: root partition on install block device (e.g. /dev/installp2) - """ - mountpoint = "/dev/mapper/pm_crypt" - - logging.info(f"(native) format {device} (root, luks), mount to" - f" {mountpoint}") - logging.info(" *** TYPE IN THE FULL DISK ENCRYPTION PASSWORD (TWICE!) ***") - - # Avoid cryptsetup warning about missing locking directory - pmb.chroot.root(args, ["mkdir", "-p", "/run/cryptsetup"]) - - pmb.chroot.root(args, ["cryptsetup", "luksFormat", - "-q", - "--cipher", args.cipher, - "--iter-time", args.iter_time, - "--use-random", - device], output="interactive") - pmb.chroot.root(args, ["cryptsetup", "luksOpen", device, "pm_crypt"], - output="interactive") - - if not os.path.exists(f"{args.work}/chroot_native/{mountpoint}"): - raise RuntimeError("Failed to open cryptdevice!") - - -def get_root_filesystem(args): - ret = args.filesystem or args.deviceinfo["root_filesystem"] or "ext4" - pmaports_cfg = pmb.config.pmaports.read_config(args) - - supported = pmaports_cfg.get("supported_root_filesystems", "ext4") - supported_list = supported.split(",") - - if ret not in supported_list: - raise ValueError(f"Root filesystem {ret} is not supported by your" - " currently checked out pmaports branch. Update your" - " branch ('pmbootstrap pull'), change it" - " ('pmbootstrap init'), or select one of these" - f" filesystems: {', '.join(supported_list)}") - return ret - - -def format_and_mount_root(args, device, root_label, disk): - """ - :param device: root partition on install block device (e.g. /dev/installp2) - :param root_label: label of the root partition (e.g. "pmOS_root") - :param disk: path to disk block device (e.g. /dev/mmcblk0) or None - """ - # Format - if not args.rsync: - filesystem = get_root_filesystem(args) - - if filesystem == "ext4": - # Some downstream kernels don't support metadata_csum (#1364). - # When changing the options of mkfs.ext4, also change them in the - # recovery zip code (see 'grep -r mkfs\.ext4')! - mkfs_root_args = ["mkfs.ext4", "-O", "^metadata_csum", "-F", - "-q", "-L", root_label] - # When we don't know the file system size before hand like - # with non-block devices, we need to explicitly set a number of - # inodes. 
See #1717 and #1845 for details - if not disk: - mkfs_root_args = mkfs_root_args + ["-N", "100000"] - elif filesystem == "f2fs": - mkfs_root_args = ["mkfs.f2fs", "-f", "-l", root_label] - elif filesystem == "btrfs": - mkfs_root_args = ["mkfs.btrfs", "-f", "-L", root_label] - else: - raise RuntimeError(f"Don't know how to format {filesystem}!") - - install_fsprogs(args, filesystem) - logging.info(f"(native) format {device} (root, {filesystem})") - pmb.chroot.root(args, mkfs_root_args + [device]) - - # Mount - mountpoint = "/mnt/install" - logging.info("(native) mount " + device + " to " + mountpoint) - pmb.chroot.root(args, ["mkdir", "-p", mountpoint]) - pmb.chroot.root(args, ["mount", device, mountpoint]) - - # Create separate subvolumes if root filesystem is btrfs - if filesystem == "btrfs": - pmb.chroot.root(args, - ["btrfs", "subvol", "create", mountpoint + "/root"]) - pmb.chroot.root(args, - ["btrfs", "subvol", "create", mountpoint + "/var"]) - - -def format(args, layout, boot_label, root_label, disk): - """ - :param layout: partition layout from get_partition_layout() - :param boot_label: label of the boot partition (e.g. "pmOS_boot") - :param root_label: label of the root partition (e.g. "pmOS_root") - :param disk: path to disk block device (e.g. /dev/mmcblk0) or None - """ - root_dev = f"/dev/installp{layout['root']}" - boot_dev = f"/dev/installp{layout['boot']}" - - if args.full_disk_encryption: - format_luks_root(args, root_dev) - root_dev = "/dev/mapper/pm_crypt" - - format_and_mount_root(args, root_dev, root_label, disk) - format_and_mount_boot(args, boot_dev, boot_label) diff --git a/pmb/install/losetup.py b/pmb/install/losetup.py deleted file mode 100644 index 6fe943ba..00000000 --- a/pmb/install/losetup.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import json -import logging -import os -import time - -import pmb.helpers.mount -import pmb.helpers.run -import pmb.chroot - - -def init(args): - if not os.path.isdir("/sys/module/loop"): - pmb.helpers.run.root(args, ["modprobe", "loop"]) - for loopdevice in glob.glob("/dev/loop*"): - if os.path.isdir(loopdevice): - continue - pmb.helpers.mount.bind_file(args, loopdevice, - args.work + "/chroot_native/" + loopdevice) - - -def mount(args, img_path): - """ - :param img_path: Path to the img file inside native chroot. - """ - logging.debug("(native) mount " + img_path + " (loop)") - - # Try to mount multiple times (let the kernel module initialize #1594) - for i in range(0, 5): - # Retry - if i > 0: - logging.debug("loop module might not be initialized yet, retry in" - " one second...") - time.sleep(1) - - # Mount and return on success - init(args) - - losetup_cmd = ["losetup", "-f", img_path] - sector_size = args.deviceinfo["rootfs_image_sector_size"] - if sector_size: - losetup_cmd += ["-b", str(int(sector_size))] - - pmb.chroot.root(args, losetup_cmd, check=False) - if device_by_back_file(args, img_path): - return - - # Failure: raise exception - raise RuntimeError("Failed to mount loop device: " + img_path) - - -def device_by_back_file(args, back_file, auto_init=True): - """ - Get the /dev/loopX device that points to a specific image file. 
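The lookup that device_by_back_file() performs runs over the JSON printed by util-linux `losetup --json --list`; a standalone sketch over a hard-coded sample in that shape (the image path is invented):

    import json

    # Hard-coded sample in the shape of `losetup --json --list` output
    sample = """{"loopdevices": [
        {"name": "/dev/loop0", "back-file": "/home/pmos/rootfs/example.img"}
    ]}"""

    back_file = "/home/pmos/rootfs/example.img"  # invented image path
    device = next((dev["name"] for dev in json.loads(sample)["loopdevices"]
                   if dev["back-file"] == back_file), None)
    print(device)  # /dev/loop0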
- """ - - # Get list from losetup - losetup_output = pmb.chroot.root(args, ["losetup", "--json", "--list"], - output_return=True, auto_init=auto_init) - if not losetup_output: - return None - - # Find the back_file - losetup = json.loads(losetup_output) - for loopdevice in losetup["loopdevices"]: - if loopdevice["back-file"] == back_file: - return loopdevice["name"] - return None - - -def umount(args, img_path, auto_init=True): - """ - :param img_path: Path to the img file inside native chroot. - """ - device = device_by_back_file(args, img_path, auto_init) - if not device: - return - logging.debug("(native) umount " + device) - pmb.chroot.root(args, ["losetup", "-d", device], auto_init=auto_init) diff --git a/pmb/install/partition.py b/pmb/install/partition.py deleted file mode 100644 index c0e3fd68..00000000 --- a/pmb/install/partition.py +++ /dev/null @@ -1,192 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import time -import pmb.chroot -import pmb.config -import pmb.install.losetup - - -def partitions_mount(args, layout, disk): - """ - Mount blockdevices of partitions inside native chroot - :param layout: partition layout from get_partition_layout() - :param disk: path to disk block device (e.g. /dev/mmcblk0) or None - """ - prefix = disk - if not disk: - img_path = "/home/pmos/rootfs/" + args.device + ".img" - prefix = pmb.install.losetup.device_by_back_file(args, img_path) - - tries = 20 - - # Devices ending with a number have a "p" before the partition number, - # /dev/sda1 has no "p", but /dev/mmcblk0p1 has. See add_partition() in - # block/partitions/core.c of linux.git. - partition_prefix = prefix - if str.isdigit(prefix[-1:]): - partition_prefix = f"{prefix}p" - - found = False - for i in range(tries): - if os.path.exists(f"{partition_prefix}1"): - found = True - break - logging.debug(f"NOTE: ({i + 1}/{tries}) failed to find the install " - "partition. Retrying...") - time.sleep(0.1) - - if not found: - raise RuntimeError(f"Unable to find the first partition of {prefix}, " - f"expected it to be at {partition_prefix}1!") - - partitions = [layout["boot"], layout["root"]] - - if layout["kernel"]: - partitions += [layout["kernel"]] - - for i in partitions: - source = f"{partition_prefix}{i}" - target = args.work + "/chroot_native/dev/installp" + str(i) - pmb.helpers.mount.bind_file(args, source, target) - - -def partition(args, layout, size_boot, size_reserve): - """ - Partition /dev/install and create /dev/install{p1,p2,p3}: - * /dev/installp1: boot - * /dev/installp2: root (or reserved space) - * /dev/installp3: (root, if reserved space > 0) - - When adjusting this function, make sure to also adjust - ondev-prepare-internal-storage.sh in postmarketos-ondev.git! - - :param layout: partition layout from get_partition_layout() - :param size_boot: size of the boot partition in MiB - :param size_reserve: empty partition between root and boot in MiB (pma#463) - """ - # Convert to MB and print info - mb_boot = f"{round(size_boot)}M" - mb_reserved = f"{round(size_reserve)}M" - mb_root_start = f"{round(size_boot) + round(size_reserve)}M" - logging.info(f"(native) partition /dev/install (boot: {mb_boot}," - f" reserved: {mb_reserved}, root: the rest)") - - filesystem = args.deviceinfo["boot_filesystem"] or "ext2" - - # Actual partitioning with 'parted'. Using check=False, because parted - # sometimes "fails to inform the kernel". 
In case it really failed with - # partitioning, the follow-up mounting/formatting will not work, so it - # will stop there (see #463). - boot_part_start = args.deviceinfo["boot_part_start"] or "2048" - - partition_type = args.deviceinfo["partition_type"] or "msdos" - - commands = [ - ["mktable", partition_type], - ["mkpart", "primary", filesystem, boot_part_start + 's', mb_boot], - ] - - if size_reserve: - mb_reserved_end = f"{round(size_reserve + size_boot)}M" - commands += [["mkpart", "primary", mb_boot, mb_reserved_end]] - - commands += [ - ["mkpart", "primary", mb_root_start, "100%"], - ["set", str(layout["boot"]), "boot", "on"] - ] - - # Not strictly necessary if the device doesn't use EFI boot, but marking - # it as an ESP will cover all situations where the device does use EFI - # boot. Marking it as ESP is helpful for EFI fw when it's looking for EFI - # system partitions. It's assumed that setting this bit is unlikely to - # cause problems for other situations, like when using Legacy BIOS boot - # or u-boot. - if partition_type.lower() == "gpt": - commands += [["set", str(layout["boot"]), "esp", "on"]] - - for command in commands: - pmb.chroot.root(args, ["parted", "-s", "/dev/install"] + - command, check=False) - - -def partition_cgpt(args, layout, size_boot, size_reserve): - """ - This function does similar functionality to partition(), but this - one is for ChromeOS devices which use special GPT. - - :param layout: partition layout from get_partition_layout() - :param size_boot: size of the boot partition in MiB - :param size_reserve: empty partition between root and boot in MiB (pma#463) - """ - - pmb.chroot.apk.install(args, ["cgpt"], build=False) - - cgpt = { - 'kpart_start': args.deviceinfo["cgpt_kpart_start"], - 'kpart_size': args.deviceinfo["cgpt_kpart_size"], - } - - # Convert to MB and print info - mb_boot = f"{round(size_boot)}M" - mb_reserved = f"{round(size_reserve)}M" - logging.info(f"(native) partition /dev/install (boot: {mb_boot}," - f" reserved: {mb_reserved}, root: the rest)") - - boot_part_start = str(int(cgpt['kpart_start']) + int(cgpt['kpart_size'])) - - # Convert to sectors - s_boot = str(int(size_boot * 1024 * 1024 / 512)) - s_root_start = str(int( - int(boot_part_start) + int(s_boot) + size_reserve * 1024 * 1024 / 512 - )) - - commands = [ - ["parted", "-s", "/dev/install", "mktable", "gpt"], - ["cgpt", "create", "/dev/install"], - [ - "cgpt", "add", - "-i", str(layout["kernel"]), - "-t", "kernel", - "-b", cgpt['kpart_start'], - "-s", cgpt['kpart_size'], - "-l", "pmOS_kernel", - "-S", "1", # Successful flag - "-T", "5", # Tries flag - "-P", "10", # Priority flag - "/dev/install" - ], - [ - "cgpt", "add", - # pmOS_boot is second partition, the first will be ChromeOS kernel - # partition - "-i", str(layout["boot"]), # Partition number - "-t", "efi", # Mark this partition as bootable for u-boot - "-b", boot_part_start, - "-s", s_boot, - "-l", "pmOS_boot", - "/dev/install" - ], - ] - - dev_size = pmb.chroot.root( - args, ["blockdev", "--getsz", "/dev/install"], output_return=True) - # 33: Sec GPT table (32) + Sec GPT header (1) - root_size = str(int(dev_size) - int(s_root_start) - 33) - - commands += [ - [ - "cgpt", "add", - "-i", str(layout["root"]), - "-t", "data", - "-b", s_root_start, - "-s", root_size, - "-l", "pmOS_root", - "/dev/install" - ], - ["partx", "-a", "/dev/install"] - ] - - for command in commands: - pmb.chroot.root(args, command, check=False) diff --git a/pmb/install/recovery.py b/pmb/install/recovery.py deleted file mode 100644 index 
a95e714d..00000000 --- a/pmb/install/recovery.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2023 Attila Szollosi -# SPDX-License-Identifier: GPL-3.0-or-later -import logging - -import pmb.chroot -import pmb.config.pmaports -import pmb.flasher -import pmb.helpers.frontend - - -def create_zip(args, suffix): - """ - Create android recovery compatible installer zip. - """ - zip_root = "/var/lib/postmarketos-android-recovery-installer/" - rootfs = "/mnt/rootfs_" + args.device - flavor = pmb.helpers.frontend._parse_flavor(args) - method = args.deviceinfo["flash_method"] - vars = pmb.flasher.variables(args, flavor, method) - - # Install recovery installer package in buildroot - pmb.chroot.apk.install(args, - ["postmarketos-android-recovery-installer"], - suffix) - - logging.info("(" + suffix + ") create recovery zip") - - for key in vars: - pmb.flasher.check_partition_blacklist(args, key, vars[key]) - - # Create config file for the recovery installer - options = { - "DEVICE": args.device, - "FLASH_KERNEL": args.recovery_flash_kernel, - "ISOREC": method == "heimdall-isorec", - "KERNEL_PARTLABEL": vars["$PARTITION_KERNEL"], - "INITFS_PARTLABEL": vars["$PARTITION_INITFS"], - # Name is still "SYSTEM", not "ROOTFS" in the recovery installer - "SYSTEM_PARTLABEL": vars["$PARTITION_ROOTFS"], - "INSTALL_PARTITION": args.recovery_install_partition, - "CIPHER": args.cipher, - "FDE": args.full_disk_encryption, - } - - # Backwards compatibility with old mkinitfs (pma#660) - pmaports_cfg = pmb.config.pmaports.read_config(args) - if pmaports_cfg.get("supported_mkinitfs_without_flavors", False): - options["FLAVOR"] = "" - else: - options["FLAVOR"] = f"-{flavor}" if flavor is not None else "-" - - # Write to a temporary file - config_temp = args.work + "/chroot_" + suffix + "/tmp/install_options" - with open(config_temp, "w") as handle: - for key, value in options.items(): - if isinstance(value, bool): - value = str(value).lower() - handle.write(key + "='" + value + "'\n") - - commands = [ - # Move config file from /tmp/ to zip root - ["mv", "/tmp/install_options", "chroot/install_options"], - # Create tar archive of the rootfs - ["tar", "-pcf", "rootfs.tar", "--exclude", "./home", "-C", rootfs, - "."], - # Append packages keys - ["tar", "-prf", "rootfs.tar", "-C", "/", "./etc/apk/keys"], - # Compress with -1 for speed improvement - ["gzip", "-f1", "rootfs.tar"], - ["build-recovery-zip", args.device]] - for command in commands: - pmb.chroot.root(args, command, suffix, zip_root) diff --git a/pmb/install/ui.py b/pmb/install/ui.py deleted file mode 100644 index 95d0ec47..00000000 --- a/pmb/install/ui.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2023 Dylan Van Assche -# SPDX-License-Identifier: GPL-3.0-or-later -import logging - -import pmb.helpers.pmaports - - -def get_groups(args): - """ Get all groups to which the user additionally must be added. - The list of groups are listed in _pmb_groups of the UI and - UI-extras package. - - :returns: list of groups, e.g. 
["feedbackd", "udev"] """ - ret = [] - if args.ui == "none": - return ret - - # UI package - meta = f"postmarketos-ui-{args.ui}" - apkbuild = pmb.helpers.pmaports.get(args, meta) - groups = apkbuild["_pmb_groups"] - if groups: - logging.debug(f"{meta}: install _pmb_groups:" - f" {', '.join(groups)}") - ret += groups - - # UI-extras subpackage - meta_extras = f"{meta}-extras" - if args.ui_extras and meta_extras in apkbuild["subpackages"]: - groups = apkbuild["subpackages"][meta_extras]["_pmb_groups"] - if groups: - logging.debug(f"{meta_extras}: install _pmb_groups:" - f" {', '.join(groups)}") - ret += groups - - return ret diff --git a/pmb/netboot/__init__.py b/pmb/netboot/__init__.py deleted file mode 100644 index c05449fc..00000000 --- a/pmb/netboot/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2023 Mark Hargreaves, Luca Weiss -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import socket -import time - -import pmb.chroot.root -import pmb.helpers.run - - -def start_nbd_server(args, ip="172.16.42.2", port=9999): - """ - Start nbd server in chroot_native with pmOS rootfs. - :param ip: IP address to serve nbd server for - :param port: port of nbd server - """ - - pmb.chroot.apk.install(args, ['nbd']) - - chroot = f"{args.work}/chroot_native" - - rootfs_path = f"/mnt/pmbootstrap/netboot/{args.device}.img" - if not os.path.exists(chroot + rootfs_path) or args.replace: - rootfs_path2 = f"/home/pmos/rootfs/{args.device}.img" - if not os.path.exists(chroot + rootfs_path2): - raise RuntimeError("The rootfs has not been generated yet, please " - "run 'pmbootstrap install' first.") - if args.replace and not \ - pmb.helpers.cli.confirm(args, f"Are you sure you want to " - f"replace the rootfs for " - f"{args.device}?"): - return - pmb.chroot.root(args, ["cp", rootfs_path2, rootfs_path]) - logging.info(f"NOTE: Copied device image to {args.work}" - f"/images_netboot/. The image will persist \"pmbootstrap " - f"zap\" for your convenience. Use \"pmbootstrap netboot " - f"serve --help\" for more options.") - - logging.info(f"Running nbd server for {args.device} on {ip} port {port}.") - - while True: - logging.info("Waiting for postmarketOS device to appear...") - - # Try to bind to the IP ourselves before handing it to nbd-servere - # This is purely to improve the UX as nbd-server just quits when it - # cannot bind to an IP address. - test_socket = socket.socket() - while True: - try: - test_socket.bind((ip, 9998)) - except OSError as e: - if e.errno != 99: # Cannot assign requested address - raise e - # Wait a bit before retrying - time.sleep(0.5) - continue - test_socket.close() - break - - logging.info("Found postmarketOS device, serving image...") - pmb.chroot.root( - args, ["nbd-server", f"{ip}@{port}", rootfs_path, "-d"], - check=False, disable_timeout=True) - logging.info("nbd-server quit. Connection lost?") - # On a reboot nbd-server will quit, but the IP address sticks around - # for a bit longer, so wait. 
- time.sleep(5) diff --git a/pmb/parse/__init__.py b/pmb/parse/__init__.py deleted file mode 100644 index ee628c45..00000000 --- a/pmb/parse/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -from pmb.parse.arguments import arguments, arguments_install, arguments_flasher -from pmb.parse._apkbuild import apkbuild -from pmb.parse._apkbuild import function_body -from pmb.parse.binfmt_info import binfmt_info -from pmb.parse.deviceinfo import deviceinfo -from pmb.parse.kconfig import check -from pmb.parse.bootimg import bootimg -from pmb.parse.cpuinfo import arm_big_little_first_group_ncpus -import pmb.parse.arch diff --git a/pmb/parse/_apkbuild.py b/pmb/parse/_apkbuild.py deleted file mode 100644 index a76dedc0..00000000 --- a/pmb/parse/_apkbuild.py +++ /dev/null @@ -1,429 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import re -from collections import OrderedDict - -import pmb.config -import pmb.helpers.devices -import pmb.parse.version - -# sh variable name regex: https://stackoverflow.com/a/2821201/3527128 - -# ${foo} -revar = re.compile(r"\${([a-zA-Z_]+[a-zA-Z0-9_]*)}") - -# $foo -revar2 = re.compile(r"\$([a-zA-Z_]+[a-zA-Z0-9_]*)") - -# ${var/foo/bar}, ${var/foo/}, ${var/foo} -- replace foo with bar -revar3 = re.compile(r"\${([a-zA-Z_]+[a-zA-Z0-9_]*)/([^/]+)(?:/([^/]*?))?}") - -# ${foo#bar} -- cut off bar from foo from start of string -revar4 = re.compile(r"\${([a-zA-Z_]+[a-zA-Z0-9_]*)#(.*)}") - - -def replace_variable(apkbuild, value: str) -> str: - def log_key_not_found(match): - logging.verbose(f"{apkbuild['pkgname']}: key '{match.group(1)}' for" - f" replacing '{match.group(0)}' not found, ignoring") - - # ${foo} - for match in revar.finditer(value): - try: - logging.verbose("{}: replace '{}' with '{}'".format( - apkbuild["pkgname"], match.group(0), - apkbuild[match.group(1)])) - value = value.replace(match.group(0), apkbuild[match.group(1)], 1) - except KeyError: - log_key_not_found(match) - - # $foo - for match in revar2.finditer(value): - try: - newvalue = apkbuild[match.group(1)] - logging.verbose("{}: replace '{}' with '{}'".format( - apkbuild["pkgname"], match.group(0), - newvalue)) - value = value.replace(match.group(0), newvalue, 1) - except KeyError: - log_key_not_found(match) - - # ${var/foo/bar}, ${var/foo/}, ${var/foo} - for match in revar3.finditer(value): - try: - newvalue = apkbuild[match.group(1)] - search = match.group(2) - replacement = match.group(3) - if replacement is None: # arg 3 is optional - replacement = "" - newvalue = newvalue.replace(search, replacement, 1) - logging.verbose("{}: replace '{}' with '{}'".format( - apkbuild["pkgname"], match.group(0), newvalue)) - value = value.replace(match.group(0), newvalue, 1) - except KeyError: - log_key_not_found(match) - - # ${foo#bar} - rematch4 = revar4.finditer(value) - for match in rematch4: - try: - newvalue = apkbuild[match.group(1)] - substr = match.group(2) - if newvalue.startswith(substr): - newvalue = newvalue.replace(substr, "", 1) - logging.verbose("{}: replace '{}' with '{}'".format( - apkbuild["pkgname"], match.group(0), newvalue)) - value = value.replace(match.group(0), newvalue, 1) - except KeyError: - log_key_not_found(match) - - return value - - -def function_body(path, func): - """ - Get the body of a function in an APKBUILD. - - :param path: full path to the APKBUILD - :param func: name of function to get the body of. - :returns: function body in an array of strings. 
- """ - func_body = [] - in_func = False - lines = read_file(path) - for line in lines: - if in_func: - if line.startswith("}"): - in_func = False - break - func_body.append(line) - continue - else: - if line.startswith(func + "() {"): - in_func = True - continue - return func_body - - -def read_file(path): - """ - Read an APKBUILD file - - :param path: full path to the APKBUILD - :returns: contents of an APKBUILD as a list of strings - """ - with open(path, encoding="utf-8") as handle: - lines = handle.readlines() - if handle.newlines != '\n': - raise RuntimeError(f"Wrong line endings in APKBUILD: {path}") - return lines - - -def parse_attribute(attribute, lines, i, path): - """ - Parse one attribute from the APKBUILD. - - It may be written across multiple lines, use a quoting sign and/or have - a comment at the end. Some examples: - - pkgrel=3 - options="!check" # ignore this comment - arch='all !armhf' - depends=" - first-pkg - second-pkg" - - :param attribute: from the APKBUILD, i.e. "pkgname" - :param lines: \n-terminated list of lines from the APKBUILD - :param i: index of the line we are currently looking at - :param path: full path to the APKBUILD (for error message) - :returns: (found, value, i) - found: True if the attribute was found in line i, False otherwise - value: that was parsed from the line - i: line that was parsed last - """ - # Check for and cut off "attribute=" - if not lines[i].startswith(attribute + "="): - return (False, None, i) - value = lines[i][len(attribute + "="):-1] - - # Determine end quote sign - end_char = None - for char in ["'", "\""]: - if value.startswith(char): - end_char = char - value = value[1:] - break - - # Single line - if not end_char: - value = value.split("#")[0].rstrip() - return (True, value, i) - if end_char in value: - value = value.split(end_char, 1)[0] - return (True, value, i) - - # Parse lines until reaching end quote - i += 1 - while i < len(lines): - line = lines[i] - value += " " - if end_char in line: - value += line.split(end_char, 1)[0].strip() - return (True, value.strip(), i) - value += line.strip() - i += 1 - - raise RuntimeError(f"Can't find closing quote sign ({end_char}) for" - f" attribute '{attribute}' in: {path}") - - -def _parse_attributes(path, lines, apkbuild_attributes, ret): - """ - Parse attributes from a list of lines. Variables are replaced with values - from ret (if found) and split into the format configured in - apkbuild_attributes. 
- - :param lines: the lines to parse - :param apkbuild_attributes: the attributes to parse - :param ret: a dict to update with new parsed variable - """ - for i in range(len(lines)): - for attribute, options in apkbuild_attributes.items(): - found, value, i = parse_attribute(attribute, lines, i, path) - if not found: - continue - - ret[attribute] = replace_variable(ret, value) - - if "subpackages" in apkbuild_attributes: - subpackages = OrderedDict() - for subpkg in ret["subpackages"].split(" "): - if subpkg: - _parse_subpackage(path, lines, ret, subpackages, subpkg) - ret["subpackages"] = subpackages - - # Split attributes - for attribute, options in apkbuild_attributes.items(): - if options.get("array", False): - # Split up arrays, delete empty strings inside the list - ret[attribute] = list(filter(None, ret[attribute].split(" "))) - if options.get("int", False): - if ret[attribute]: - ret[attribute] = int(ret[attribute]) - else: - ret[attribute] = 0 - - -def _parse_subpackage(path, lines, apkbuild, subpackages, subpkg): - """ - Attempt to parse attributes from a subpackage function. - This will attempt to locate the subpackage function in the APKBUILD and - update the given attributes with values set in the subpackage function. - - :param path: path to APKBUILD - :param lines: the lines to parse - :param apkbuild: dict of attributes already parsed from APKBUILD - :param subpackages: the subpackages dict to update - :param subpkg: the subpackage to parse - (may contain subpackage function name separated by :) - """ - subpkgparts = subpkg.split(":") - subpkgname = subpkgparts[0] - subpkgsplit = subpkgname[subpkgname.rfind("-") + 1:] - if len(subpkgparts) > 1: - subpkgsplit = subpkgparts[1] - - # Find start and end of package function - start = end = 0 - prefix = subpkgsplit + "() {" - for i in range(len(lines)): - if lines[i].startswith(prefix): - start = i + 1 - elif start and lines[i].startswith("}"): - end = i - break - - if not start: - # Unable to find subpackage function in the APKBUILD. - # The subpackage function could be actually missing, or this is a - # problem in the parser. For now we also don't handle subpackages with - # default functions (e.g. -dev or -doc). - # In the future we may want to specifically handle these, and throw - # an exception here for all other missing subpackage functions. - subpackages[subpkgname] = None - logging.verbose( - f"{apkbuild['pkgname']}: subpackage function '{subpkgsplit}' for " - f"subpackage '{subpkgname}' not found, ignoring") - return - - if not end: - raise RuntimeError( - f"Could not find end of subpackage function, no line starts with " - f"'}}' after '{prefix}' in {path}") - - lines = lines[start:end] - # Strip tabs before lines in function - lines = [line.strip() + "\n" for line in lines] - - # Copy variables - apkbuild = apkbuild.copy() - apkbuild["subpkgname"] = subpkgname - # Don't inherit pmb_recommends from the top-level package. 
- # There are two reasons for this: - # 1) the subpackage may specify its own pmb_recommends - # 2) the top-level package may list the subpackage as a pmb_recommends, - # thereby creating a circular dependency - apkbuild["_pmb_recommends"] = "" - - # Parse relevant attributes for the subpackage - _parse_attributes( - path, lines, pmb.config.apkbuild_package_attributes, apkbuild) - - # Return only properties interesting for subpackages - ret = {} - for key in pmb.config.apkbuild_package_attributes: - ret[key] = apkbuild[key] - subpackages[subpkgname] = ret - - -def apkbuild(path, check_pkgver=True, check_pkgname=True): - """ - Parse relevant information out of the APKBUILD file. This is not meant - to be perfect and catch every edge case (for that, a full shell parser - would be necessary!). Instead, it should just work with the use-cases - covered by pmbootstrap and not take too long. - Run 'pmbootstrap apkbuild_parse hello-world' for a full output example. - - :param path: full path to the APKBUILD - :param check_pkgver: verify that the pkgver is valid. - :param check_pkgname: the pkgname must match the name of the aport folder - :returns: relevant variables from the APKBUILD. Arrays get returned as - arrays. - """ - # Try to get a cached result first (we assume that the aports don't change - # in one pmbootstrap call) - if path in pmb.helpers.other.cache["apkbuild"]: - return pmb.helpers.other.cache["apkbuild"][path] - - # Read the file and check line endings - lines = read_file(path) - - # Parse all attributes from the config - ret = {key: "" for key in pmb.config.apkbuild_attributes.keys()} - _parse_attributes(path, lines, pmb.config.apkbuild_attributes, ret) - - # Sanity check: pkgname - suffix = f"/{ret['pkgname']}/APKBUILD" - if check_pkgname: - if not os.path.realpath(path).endswith(suffix): - logging.info(f"Folder: '{os.path.dirname(path)}'") - logging.info(f"Pkgname: '{ret['pkgname']}'") - raise RuntimeError("The pkgname must be equal to the name of" - " the folder that contains the APKBUILD!") - - # Sanity check: pkgver - if check_pkgver: - if not pmb.parse.version.validate(ret["pkgver"]): - logging.info( - "NOTE: Valid pkgvers are described here: " - "https://wiki.alpinelinux.org/wiki/APKBUILD_Reference#pkgver") - raise RuntimeError(f"Invalid pkgver '{ret['pkgver']}' in" - f" APKBUILD: {path}") - - # Fill cache - pmb.helpers.other.cache["apkbuild"][path] = ret - return ret - - -def kernels(args, device): - """ - Get the possible kernels from a device-* APKBUILD. - - :param device: the device name, e.g. 
"lg-mako" - :returns: None when the kernel is hardcoded in depends - :returns: kernel types and their description (as read from the subpackages) - possible types: "downstream", "stable", "mainline" - example: {"mainline": "Mainline description", - "downstream": "Downstream description"} - """ - # Read the APKBUILD - apkbuild_path = pmb.helpers.devices.find_path(args, device, 'APKBUILD') - if apkbuild_path is None: - return None - subpackages = apkbuild(apkbuild_path)["subpackages"] - - # Read kernels from subpackages - ret = {} - subpackage_prefix = f"device-{device}-kernel-" - for subpkgname, subpkg in subpackages.items(): - if not subpkgname.startswith(subpackage_prefix): - continue - if subpkg is None: - raise RuntimeError( - f"Cannot find subpackage function for: {subpkgname}") - name = subpkgname[len(subpackage_prefix):] - ret[name] = subpkg["pkgdesc"] - - # Return - if ret: - return ret - return None - - -def _parse_comment_tags(lines, tag): - """ - Parse tags defined as comments in a APKBUILD file. This can be used to - parse e.g. the maintainers of a package (defined using # Maintainer:). - - :param lines: lines of the APKBUILD - :param tag: the tag to parse, e.g. Maintainer - :returns: array of values of the tag, one per line - """ - prefix = f'# {tag}:' - ret = [] - for line in lines: - if line.startswith(prefix): - ret.append(line[len(prefix):].strip()) - return ret - - -def maintainers(path): - """ - Parse maintainers of an APKBUILD file. They should be defined using - # Maintainer: (first maintainer) and # Co-Maintainer: (additional - maintainers). - - :param path: full path to the APKBUILD - :returns: array of (at least one) maintainer, or None - """ - lines = read_file(path) - maintainers = _parse_comment_tags(lines, 'Maintainer') - if not maintainers: - return None - - # An APKBUILD should only have one Maintainer:, - # in pmaports others should be defined using Co-Maintainer: - if len(maintainers) > 1: - raise RuntimeError("Multiple Maintainer: lines in APKBUILD") - - maintainers += _parse_comment_tags(lines, 'Co-Maintainer') - if '' in maintainers: - raise RuntimeError("Empty (Co-)Maintainer: tag") - return maintainers - - -def unmaintained(path): - """ - Return if (and why) an APKBUILD might be unmaintained. This should be - defined using a # Unmaintained: tag in the APKBUILD. - - :param path: full path to the APKBUILD - :returns: reason why APKBUILD is unmaintained, or None - """ - unmaintained = _parse_comment_tags(read_file(path), 'Unmaintained') - if not unmaintained: - return None - return '\n'.join(unmaintained) diff --git a/pmb/parse/apkindex.py b/pmb/parse/apkindex.py deleted file mode 100644 index 3828c268..00000000 --- a/pmb/parse/apkindex.py +++ /dev/null @@ -1,396 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import collections -import logging -import os -import tarfile -import pmb.chroot.apk -import pmb.helpers.package -import pmb.helpers.repo -import pmb.parse.version - - -def parse_next_block(path, lines, start): - """ - Parse the next block in an APKINDEX. - - :param path: to the APKINDEX.tar.gz - :param start: current index in lines, gets increased in this - function. Wrapped into a list, so it can be modified - "by reference". Example: [5] - :param lines: all lines from the "APKINDEX" file inside the archive - :returns: a dictionary with the following structure: - { "arch": "noarch", - "depends": ["busybox-extras", "lddtree", ... 
], - "origin": "postmarketos-mkinitfs", - "pkgname": "postmarketos-mkinitfs", - "provides": ["mkinitfs=0.0.1"], - "timestamp": "1500000000", - "version": "0.0.4-r10" } - NOTE: "depends" is not set for packages without any dependencies, - e.g. musl. - NOTE: "timestamp" and "origin" are not set for virtual packages - (#1273). We use that information to skip these virtual - packages in parse(). - :returns: None, when there are no more blocks - """ - - # Parse until we hit an empty line or end of file - ret = {} - mapping = { - "A": "arch", - "D": "depends", - "o": "origin", - "P": "pkgname", - "p": "provides", - "k": "provider_priority", - "t": "timestamp", - "V": "version", - } - end_of_block_found = False - for i in range(start[0], len(lines)): - # Check for empty line - start[0] = i + 1 - line = lines[i] - if not isinstance(line, str): - line = line.decode() - if line == "\n": - end_of_block_found = True - break - - # Parse keys from the mapping - for letter, key in mapping.items(): - if line.startswith(letter + ":"): - if key in ret: - raise RuntimeError( - "Key " + key + " (" + letter + ":) specified twice" - " in block: " + str(ret) + ", file: " + path) - ret[key] = line[2:-1] - - # Format and return the block - if end_of_block_found: - # Check for required keys - for key in ["arch", "pkgname", "version"]: - if key not in ret: - raise RuntimeError(f"Missing required key '{key}' in block " - f"{ret}, file: {path}") - - # Format optional lists - for key in ["provides", "depends"]: - if key in ret and ret[key] != "": - # Ignore all operators for now - values = ret[key].split(" ") - ret[key] = [] - for value in values: - for operator in [">", "=", "<", "~"]: - if operator in value: - value = value.split(operator)[0] - break - ret[key].append(value) - else: - ret[key] = [] - return ret - - # No more blocks - elif ret != {}: - raise RuntimeError("Last block in " + path + " does not end" - " with a new line! Delete the file and" - " try again. Last block: " + str(ret)) - return None - - -def parse_add_block(ret, block, alias=None, multiple_providers=True): - """ - Add one block to the return dictionary of parse(). - - :param ret: dictionary of all packages in the APKINDEX that is - getting built right now. This function will extend it. - :param block: return value from parse_next_block(). - :param alias: defaults to the pkgname, could be an alias from the - "provides" list. - :param multiple_providers: assume that there are more than one provider for - the alias. This makes sense when parsing the - APKINDEX files from a repository (#1122), but - not when parsing apk's installed packages DB. - """ - - # Defaults - pkgname = block["pkgname"] - alias = alias or pkgname - - # Get an existing block with the same alias - block_old = None - if multiple_providers and alias in ret and pkgname in ret[alias]: - block_old = ret[alias][pkgname] - elif not multiple_providers and alias in ret: - block_old = ret[alias] - - # Ignore the block, if the block we already have has a higher version - if block_old: - version_old = block_old["version"] - version_new = block["version"] - if pmb.parse.version.compare(version_old, version_new) == 1: - return - - # Add it to the result set - if multiple_providers: - if alias not in ret: - ret[alias] = {} - ret[alias][pkgname] = block - else: - ret[alias] = block - - -def parse(path, multiple_providers=True): - """ - Parse an APKINDEX.tar.gz file, and return its content as dictionary. 
- - :param path: path to an APKINDEX.tar.gz file or apk package database - (almost the same format, but not compressed). - :param multiple_providers: assume that there are more than one provider for - the alias. This makes sense when parsing the - APKINDEX files from a repository (#1122), but - not when parsing apk's installed packages DB. - :returns: (without multiple_providers) - generic format: - { pkgname: block, ... } - - example: - { "postmarketos-mkinitfs": block, - "so:libGL.so.1": block, ...} - - :returns: (with multiple_providers) - generic format: - { provide: { pkgname: block, ... }, ... } - - example: - { "postmarketos-mkinitfs": {"postmarketos-mkinitfs": block}, - "so:libGL.so.1": {"mesa-egl": block, "libhybris": block}, ...} - - NOTE: "block" is the return value from parse_next_block() above. - """ - # Require the file to exist - if not os.path.isfile(path): - logging.verbose("NOTE: APKINDEX not found, assuming no binary packages" - " exist for that architecture: " + path) - return {} - - # Try to get a cached result first - lastmod = os.path.getmtime(path) - cache_key = "multiple" if multiple_providers else "single" - if path in pmb.helpers.other.cache["apkindex"]: - cache = pmb.helpers.other.cache["apkindex"][path] - if cache["lastmod"] == lastmod: - if cache_key in cache: - return cache[cache_key] - else: - clear_cache(path) - - # Read all lines - if tarfile.is_tarfile(path): - with tarfile.open(path, "r:gz") as tar: - with tar.extractfile(tar.getmember("APKINDEX")) as handle: - lines = handle.readlines() - else: - with open(path, "r", encoding="utf-8") as handle: - lines = handle.readlines() - - # Parse the whole APKINDEX file - ret = collections.OrderedDict() - start = [0] - while True: - block = parse_next_block(path, lines, start) - if not block: - break - - # Skip virtual packages - if "timestamp" not in block: - logging.verbose("Skipped virtual package " + str(block) + " in" - " file: " + path) - continue - - # Add the next package and all aliases - parse_add_block(ret, block, None, multiple_providers) - if "provides" in block: - for alias in block["provides"]: - parse_add_block(ret, block, alias, multiple_providers) - - # Update the cache - if path not in pmb.helpers.other.cache["apkindex"]: - pmb.helpers.other.cache["apkindex"][path] = {"lastmod": lastmod} - pmb.helpers.other.cache["apkindex"][path][cache_key] = ret - return ret - - -def parse_blocks(path): - """ - Read all blocks from an APKINDEX.tar.gz into a list. - - :path: full path to the APKINDEX.tar.gz file. - :returns: all blocks in the APKINDEX, without restructuring them by - pkgname or removing duplicates with lower versions (use - parse() if you need these features). Structure: - [block, block, ...] - - NOTE: "block" is the return value from parse_next_block() above. - """ - # Parse all lines - with tarfile.open(path, "r:gz") as tar: - with tar.extractfile(tar.getmember("APKINDEX")) as handle: - lines = handle.readlines() - - # Parse lines into blocks - ret = [] - start = [0] - while True: - block = pmb.parse.apkindex.parse_next_block(path, lines, start) - if not block: - return ret - ret.append(block) - - -def clear_cache(path): - """ - Clear the APKINDEX parsing cache. 
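The caching in parse() above keys results on the file's modification time; the pattern in isolation (the cache dict and function names here are illustrative only):

    import os

    _cache = {}  # path -> {"lastmod": mtime, "data": parsed result}

    def cached_parse(path, parse_func):
        lastmod = os.path.getmtime(path)
        entry = _cache.get(path)
        if entry and entry["lastmod"] == lastmod:
            return entry["data"]  # file unchanged since last parse
        data = parse_func(path)
        _cache[path] = {"lastmod": lastmod, "data": data}
        return data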
- - :returns: True on successful deletion, False otherwise - """ - logging.verbose("Clear APKINDEX cache for: " + path) - if path in pmb.helpers.other.cache["apkindex"]: - del pmb.helpers.other.cache["apkindex"][path] - return True - else: - logging.verbose("Nothing to do, path was not in cache:" + - str(pmb.helpers.other.cache["apkindex"].keys())) - return False - - -def providers(args, package, arch=None, must_exist=True, indexes=None): - """ - Get all packages, which provide one package. - - :param package: of which you want to have the providers - :param arch: defaults to native arch, only relevant for indexes=None - :param must_exist: When set to true, raise an exception when the package is - not provided at all. - :param indexes: list of APKINDEX.tar.gz paths, defaults to all index files - (depending on arch) - :returns: list of parsed packages. Example for package="so:libGL.so.1": - {"mesa-egl": block, "libhybris": block} - block is the return value from parse_next_block() above. - """ - - if not indexes: - arch = arch or pmb.config.arch_native - indexes = pmb.helpers.repo.apkindex_files(args, arch) - - package = pmb.helpers.package.remove_operators(package) - - ret = collections.OrderedDict() - for path in indexes: - # Skip indexes not providing the package - index_packages = parse(path) - if package not in index_packages: - continue - - # Iterate over found providers - for provider_pkgname, provider in index_packages[package].items(): - # Skip lower versions of providers we already found - version = provider["version"] - if provider_pkgname in ret: - version_last = ret[provider_pkgname]["version"] - if pmb.parse.version.compare(version, version_last) == -1: - logging.verbose(package + ": provided by: " + - provider_pkgname + "-" + version + " in " + - path + " (but " + version_last + " is" - " higher)") - continue - - # Add the provider to ret - logging.verbose(package + ": provided by: " + provider_pkgname + - "-" + version + " in " + path) - ret[provider_pkgname] = provider - - if ret == {} and must_exist: - logging.debug("Searched in APKINDEX files: " + ", ".join(indexes)) - raise RuntimeError("Could not find package '" + package + "'!") - - return ret - - -def provider_highest_priority(providers, pkgname): - """ - Get the provider(s) with the highest provider_priority and log a message. - - :param providers: returned dict from providers(), must not be empty - :param pkgname: the package name we are interested in (for the log message) - """ - max_priority = 0 - priority_providers = collections.OrderedDict() - for provider_name, provider in providers.items(): - priority = int(provider.get("provider_priority", -1)) - if priority > max_priority: - priority_providers.clear() - max_priority = priority - if priority == max_priority: - priority_providers[provider_name] = provider - - if priority_providers: - logging.debug( - f"{pkgname}: picked provider(s) with highest priority " - f"{max_priority}: {', '.join(priority_providers.keys())}") - return priority_providers - - # None of the providers seems to have a provider_priority defined - return providers - - -def provider_shortest(providers, pkgname): - """ - Get the provider with the shortest pkgname and log a message. In most cases - this should be sufficient, e.g. 'mesa-purism-gc7000-egl, mesa-egl' or - 'gtk+2.0-maemo, gtk+2.0'. 
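provider_shortest() boils down to a min() over the provider names; with a hypothetical two-provider dict:

    providers = {"mesa-purism-gc7000-egl": {"version": "23.1-r0"},
                 "mesa-egl": {"version": "23.1-r0"}}
    print(min(providers, key=len))  # mesa-egl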
- - :param providers: returned dict from providers(), must not be empty - :param pkgname: the package name we are interested in (for the log message) - """ - ret = min(list(providers.keys()), key=len) - if len(providers) != 1: - logging.debug( - f"{pkgname}: has multiple providers (" - f"{', '.join(providers.keys())}), picked shortest: {ret}") - return providers[ret] - - -def package(args, package, arch=None, must_exist=True, indexes=None): - """ - Get a specific package's data from an apkindex. - - :param package: of which you want to have the apkindex data - :param arch: defaults to native arch, only relevant for indexes=None - :param must_exist: When set to true, raise an exception when the package is - not provided at all. - :param indexes: list of APKINDEX.tar.gz paths, defaults to all index files - (depending on arch) - :returns: a dictionary with the following structure: - { "arch": "noarch", - "depends": ["busybox-extras", "lddtree", ... ], - "pkgname": "postmarketos-mkinitfs", - "provides": ["mkinitfs=0.0.1"], - "version": "0.0.4-r10" } - or None when the package was not found. - """ - # Provider with the same package - package_providers = providers(args, package, arch, must_exist, indexes) - if package in package_providers: - return package_providers[package] - - # Any provider - if package_providers: - return pmb.parse.apkindex.provider_shortest(package_providers, package) - - # No provider - if must_exist: - raise RuntimeError("Package '" + package + "' not found in any" - " APKINDEX.") - return None diff --git a/pmb/parse/arch.py b/pmb/parse/arch.py deleted file mode 100644 index 89bce31a..00000000 --- a/pmb/parse/arch.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import fnmatch -import platform -import pmb.parse.arch - - -def alpine_native(): - machine = platform.machine() - - mapping = { - "i686": "x86", - "x86_64": "x86_64", - "aarch64": "aarch64", - "arm64": "aarch64", - "armv6l": "armhf", - "armv7l": "armv7", - } - if machine in mapping: - return mapping[machine] - raise ValueError("Can not map platform.machine '" + machine + "'" - " to the right Alpine Linux architecture") - - -def from_chroot_suffix(args, suffix): - if suffix == "native": - return pmb.config.arch_native - if suffix in [f"rootfs_{args.device}", f"installer_{args.device}"]: - return args.deviceinfo["arch"] - if suffix.startswith("buildroot_"): - return suffix.split("_", 1)[1] - - raise ValueError("Invalid chroot suffix: " + suffix + - " (wrong device chosen in 'init' step?)") - - -def alpine_to_qemu(arch): - """ - Convert the architecture to the string used in the QEMU packaging. - This corresponds to the package name of e.g. qemu-system-aarch64. - """ - - mapping = { - "x86": "i386", - "x86_64": "x86_64", - "armhf": "arm", - "armv7": "arm", - "aarch64": "aarch64", - "riscv64": "riscv64", - } - for pattern, arch_qemu in mapping.items(): - if fnmatch.fnmatch(arch, pattern): - return arch_qemu - raise ValueError("Can not map Alpine architecture '" + arch + "'" - " to the right Debian architecture.") - - -def alpine_to_kernel(arch): - """ - Convert the architecture to the string used inside the kernel sources. - You can read the mapping from the linux-vanilla APKBUILD for example. 
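A quick usage note for the architecture mapping helpers in this file; expected results follow from the mapping tables shown here, and the import only works against a checkout from before this removal:

    import pmb.parse.arch  # module removed by this patch

    print(pmb.parse.arch.alpine_to_qemu("armv7"))    # arm
    print(pmb.parse.arch.alpine_to_kernel("armv7"))  # arm (matches "arm*")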
- """ - mapping = { - "aarch64*": "arm64", - "arm*": "arm", - "ppc*": "powerpc", - "s390*": "s390", - "riscv64*": "riscv", - } - for pattern, arch_kernel in mapping.items(): - if fnmatch.fnmatch(arch, pattern): - return arch_kernel - return arch - - -def alpine_to_hostspec(arch): - """ - See: abuild source code/functions.sh.in: arch_to_hostspec() - """ - mapping = { - "aarch64": "aarch64-alpine-linux-musl", - "armel": "armv5-alpine-linux-musleabi", - "armhf": "armv6-alpine-linux-musleabihf", - "armv7": "armv7-alpine-linux-musleabihf", - "loongarch32": "loongarch32-alpine-linux-musl", - "loongarchx32": "loongarchx32-alpine-linux-musl", - "loongarch64": "loongarch64-alpine-linux-musl", - "mips": "mips-alpine-linux-musl", - "mips64": "mips64-alpine-linux-musl", - "mipsel": "mipsel-alpine-linux-musl", - "mips64el": "mips64el-alpine-linux-musl", - "ppc": "powerpc-alpine-linux-musl", - "ppc64": "powerpc64-alpine-linux-musl", - "ppc64le": "powerpc64le-alpine-linux-musl", - "riscv32": "riscv32-alpine-linux-musl", - "riscv64": "riscv64-alpine-linux-musl", - "s390x": "s390x-alpine-linux-musl", - "x86": "i586-alpine-linux-musl", - "x86_64": "x86_64-alpine-linux-musl", - } - if arch in mapping: - return mapping[arch] - - raise ValueError("Can not map Alpine architecture '" + arch + "'" - " to the right hostspec value") - - -def cpu_emulation_required(arch): - # Obvious case: host arch is target arch - if pmb.config.arch_native == arch: - return False - - # Other cases: host arch on the left, target archs on the right - not_required = { - "x86_64": ["x86"], - "armv7": ["armel", "armhf"], - "aarch64": ["armel", "armhf", "armv7"], - } - if pmb.config.arch_native in not_required: - if arch in not_required[pmb.config.arch_native]: - return False - - # No match: then it's required - return True diff --git a/pmb/parse/arguments.py b/pmb/parse/arguments.py deleted file mode 100644 index a8505258..00000000 --- a/pmb/parse/arguments.py +++ /dev/null @@ -1,926 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import argparse -import copy -import os - -try: - import argcomplete -except ImportError: - argcomplete = False - -import pmb.config -import pmb.parse.arch -import pmb.helpers.args -import pmb.helpers.pmaports - -""" This file is about parsing command line arguments passed to pmbootstrap, as - well as generating the help pages (pmbootstrap -h). All this is done with - Python's argparse. The parsed arguments get extended and finally stored in - the "args" variable, which is prominently passed to most functions all - over the pmbootstrap code base. - - See pmb/helpers/args.py for more information about the args variable. """ - - -def toggle_other_boolean_flags(*other_destinations, value=True): - """ Helper function to group several argparse flags to one. Sets multiple - other_destination to value. - - :param other_destinations: 'the other argument names' str - :param value 'the value to set the other_destinations to' bool - :returns custom Action""" - - class SetOtherDestinationsAction(argparse.Action): - def __init__(self, option_strings, dest, **kwargs): - super().__init__(option_strings, dest, nargs=0, const=value, - default=value, **kwargs) - - def __call__(self, parser, namespace, values, option_string=None): - for destination in other_destinations: - setattr(namespace, destination, value) - - return SetOtherDestinationsAction - - -def type_ondev_cp(val): - """ Parse and validate arguments to 'pmbootstrap install --ondev --cp'. 
- - :param val: 'HOST_SRC:CHROOT_DEST' string - :returns: (HOST_SRC, CHROOT_DEST) """ - ret = val.split(":") - - if len(ret) != 2: - raise argparse.ArgumentTypeError("does not have HOST_SRC:CHROOT_DEST" - f" format: {val}") - host_src = ret[0] - if not os.path.exists(host_src): - raise argparse.ArgumentTypeError(f"HOST_SRC not found: {host_src}") - if not os.path.isfile(host_src): - raise argparse.ArgumentTypeError(f"HOST_SRC is not a file: {host_src}") - - chroot_dest = ret[1] - if not chroot_dest.startswith("/"): - raise argparse.ArgumentTypeError("CHROOT_DEST must start with '/':" - f" {chroot_dest}") - return ret - - -def arguments_install(subparser): - ret = subparser.add_parser("install", help="set up device specific" - " chroot and install to SD card or image file") - - # Other arguments (that don't fit categories below) - ret.add_argument("--no-sshd", action="store_true", - help="do not enable the SSH daemon by default") - ret.add_argument("--no-firewall", action="store_true", - help="do not enable the firewall by default") - ret.add_argument("--password", help="dummy password for automating the" - " installation - will be handled in PLAIN TEXT during" - " install and may be logged to the logfile, do not use an" - " important password!") - ret.add_argument("--no-cgpt", help="do not use cgpt partition table", - dest="install_cgpt", action="store_false", default=True) - ret.add_argument("--zap", help="zap chroots before installing", - action="store_true") - - # Image type - group_desc = ret.add_argument_group( - "optional image type", - "Format of the resulting image. Default is generating a combined image" - " of the postmarketOS boot and root partitions (--no-split). (If the" - " device's deviceinfo_flash_method requires separate boot and root" - " partitions, then --split is the default.) Related:" - " https://postmarketos.org/partitions") - group = group_desc.add_mutually_exclusive_group() - group.add_argument("--no-split", help="create combined boot and root image" - " file", dest="split", action="store_false", - default=None) - group.add_argument("--split", help="create separate boot and root image" - " files", action="store_true") - group.add_argument("--disk", "--sdcard", - help="do not create an image file, instead" - " write to the given block device (SD card, USB" - " stick, etc.), for example: '/dev/mmcblk0'", - metavar="BLOCKDEV") - group.add_argument("--android-recovery-zip", - help="generate TWRP flashable zip (recommended read:" - " https://postmarketos.org/recoveryzip)", - action="store_true", dest="android_recovery_zip") - group.add_argument("--no-image", help="do not generate an image", - action="store_true", dest="no_image") - - # Image type "--disk" related - group = ret.add_argument_group("optional image type 'disk' arguments") - group.add_argument("--rsync", help="update the disk using rsync", - action="store_true") - - # Image type "--android-recovery-zip" related - group = ret.add_argument_group("optional image type 'android-recovery-zip'" - " arguments") - group.add_argument("--recovery-install-partition", default="system", - help="partition to flash from recovery (e.g." 
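# A standalone sketch of the tri-state --split/--no-split pattern above:
# both flags write to the same "split" destination, and the None default
# lets the install code decide later based on deviceinfo_flash_method:
import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("--no-split", dest="split", action="store_false",
                   default=None)
group.add_argument("--split", action="store_true")

assert parser.parse_args([]).split is None        # decided later
assert parser.parse_args(["--split"]).split is True
assert parser.parse_args(["--no-split"]).split is False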
- " 'external_sd')", - dest="recovery_install_partition") - group.add_argument("--recovery-no-kernel", - help="do not overwrite the existing kernel", - action="store_false", dest="recovery_flash_kernel") - - # Full disk encryption (disabled by default, --no-fde has no effect) - group = ret.add_argument_group("optional full disk encryption arguments") - group.add_argument("--fde", help="use full disk encryption", - action="store_true", dest="full_disk_encryption") - group.add_argument("--no-fde", help=argparse.SUPPRESS, - action="store_true", dest="no_fde") - group.add_argument("--cipher", help="cryptsetup cipher used to encrypt the" - " the rootfs (e.g. 'aes-xts-plain64')") - group.add_argument("--iter-time", help="cryptsetup iteration time (in" - " milliseconds) to use when encrypting the system" - " partition") - - # Packages - group = ret.add_argument_group( - "optional packages arguments", - "Select or deselect packages to be included in the installation.") - group.add_argument("--add", help="comma separated list of packages to be" - " added to the rootfs (e.g. 'vim,gcc')", - metavar="PACKAGES") - group.add_argument("--no-base", - help="do not install postmarketos-base (advanced)", - action="store_false", dest="install_base") - group.add_argument("--no-recommends", dest="install_recommends", - help="do not install packages listed in _pmb_recommends" - " of the UI pmaports", - action="store_false") - - # Sparse image - group_desc = ret.add_argument_group( - "optional sparse image arguments", - "Override deviceinfo_flash_sparse for testing purpose.") - group = group_desc.add_mutually_exclusive_group() - group.add_argument("--sparse", help="generate sparse image file", - default=None, action="store_true") - group.add_argument("--no-sparse", help="do not generate sparse image file", - dest="sparse", action="store_false") - - # On-device installer - group = ret.add_argument_group( - "optional on-device installer arguments", - "Wrap the resulting image in a postmarketOS based installation OS, so" - " it can be encrypted and customized on first boot." - " Related: https://postmarketos.org/on-device-installer") - group.add_argument("--on-device-installer", "--ondev", action="store_true", - help="enable on-device installer") - group.add_argument("--no-local-pkgs", dest="install_local_pkgs", - help="do not install locally compiled packages and" - " package signing keys", action="store_false") - group.add_argument("--cp", dest="ondev_cp", nargs="+", - metavar="HOST_SRC:CHROOT_DEST", type=type_ondev_cp, - help="copy one or more files from the host system path" - " HOST_SRC to the target path CHROOT_DEST") - group.add_argument("--no-rootfs", dest="ondev_no_rootfs", - help="do not generate a pmOS rootfs as" - " /var/lib/rootfs.img (install chroot). 
The file" - " must either exist from a previous" - " 'pmbootstrap install' run or by providing it" - " as CHROOT_DEST with --cp", action="store_true") - - # Other - group = ret.add_argument_group("other optional arguments") - group.add_argument("--filesystem", help="root filesystem type", - choices=["ext4", "f2fs", "btrfs"]) - - -def arguments_export(subparser): - ret = subparser.add_parser("export", help="create convenience symlinks" - " to generated image files (system, kernel," - " initramfs, boot.img, ...)") - - ret.add_argument("export_folder", help="export folder, defaults to" - " /tmp/postmarketOS-export", - default="/tmp/postmarketOS-export", nargs="?") - ret.add_argument("--odin", help="odin flashable tar" - " (boot.img/kernel+initramfs only)", - action="store_true", dest="odin_flashable_tar") - ret.add_argument("--no-install", dest="autoinstall", default=True, - help="skip updating kernel/initfs", action="store_false") - return ret - - -def arguments_sideload(subparser): - ret = subparser.add_parser("sideload", help="Push packages to a running" - " phone connected over usb or wifi") - add_packages_arg(ret, nargs="+") - ret.add_argument("--host", help="ip of the device over wifi" - " (defaults to 172.16.42.1)", - default="172.16.42.1") - ret.add_argument("--port", help="SSH port of the device over wifi" - " (defaults to 22)", - default="22") - ret.add_argument("--user", help="use a different username than the" - " one set in init") - ret.add_argument("--arch", help="use a different architecture than the one" - " set in init") - ret.add_argument("--install-key", help="install the apk key from this" - " machine if needed", - action="store_true", dest="install_key") - return ret - - -def arguments_flasher(subparser): - ret = subparser.add_parser("flasher", help="flash something to the" - " target device") - ret.add_argument("--method", help="override flash method", - dest="flash_method", default=None) - sub = ret.add_subparsers(dest="action_flasher") - sub.required = True - - # Boot, flash kernel - boot = sub.add_parser("boot", help="boot a kernel once") - boot.add_argument("--cmdline", help="override kernel commandline") - flash_kernel = sub.add_parser("flash_kernel", help="flash a kernel") - for action in [boot, flash_kernel]: - action.add_argument("--no-install", dest="autoinstall", default=True, - help="skip updating kernel/initfs", - action="store_false") - flash_kernel.add_argument("--partition", default=None, - help="partition to flash the kernel to (defaults" - " to deviceinfo_flash_*_partition_kernel)") - - # Flash lk2nd - flash_lk2nd = sub.add_parser("flash_lk2nd", - help="flash lk2nd, a secondary bootloader" - " needed for various Android devices") - flash_lk2nd.add_argument("--partition", default=None, - help="partition to flash lk2nd to (defaults to" - " default boot image partition ") - - # Flash rootfs - flash_rootfs = sub.add_parser("flash_rootfs", - help="flash the rootfs to a partition on the" - " device (partition layout does not get" - " changed)") - flash_rootfs.add_argument("--partition", default=None, - help="partition to flash the rootfs to (defaults" - " to deviceinfo_flash_*_partition_rootfs," - " 'userdata' on Android may have more" - " space)") - - # Flash vbmeta - flash_vbmeta = sub.add_parser("flash_vbmeta", - help="generate and flash AVB 2.0 image with" - " disable verification flag set to a" - " partition on the device (typically called" - " vbmeta)") - flash_vbmeta.add_argument("--partition", default=None, - help="partition to flash the vbmeta to 
(defaults" - " to deviceinfo_flash_*_partition_vbmeta") - - # Flash dtbo - flash_dtbo = sub.add_parser("flash_dtbo", - help="flash dtbo image") - flash_dtbo.add_argument("--partition", default=None, - help="partition to flash the dtbo to (defaults" - " to deviceinfo_flash_*_partition_dtbo)") - - # Actions without extra arguments - sub.add_parser("sideload", help="sideload recovery zip") - sub.add_parser("list_flavors", help="list installed kernel flavors" + - " inside the device rootfs chroot on this computer") - sub.add_parser("list_devices", help="show connected devices") - - group = ret.add_argument_group("heimdall options", \ - "With heimdall as" - " flash method, the device automatically" - " reboots after each flash command. Use" - " --no-reboot and --resume for multiple" - " flash actions without reboot.") - group.add_argument("--no-reboot", dest="no_reboot", - help="don't automatically reboot after flashing", - action="store_true") - group.add_argument("--resume", dest="resume", - help="resume flashing after using --no-reboot", - action="store_true") - - return ret - - -def arguments_initfs(subparser): - ret = subparser.add_parser( - "initfs", help="do something with the initramfs") - sub = ret.add_subparsers(dest="action_initfs") - - # hook ls - sub.add_parser( - "hook_ls", - help="list available and installed hook packages") - - # hook add/del - hook_add = sub.add_parser("hook_add", help="add a hook package") - hook_del = sub.add_parser("hook_del", help="uninstall a hook package") - for action in [hook_add, hook_del]: - action.add_argument("hook", help="name of the hook aport, without" - f" the '{pmb.config.initfs_hook_prefix}' prefix," - " for example: 'debug-shell'") - - # ls, build, extract - sub.add_parser("ls", help="list initramfs contents") - sub.add_parser("build", help="(re)build the initramfs") - sub.add_parser("extract", - help="extract the initramfs to a temporary folder") - - return ret - - -def arguments_qemu(subparser): - ret = subparser.add_parser("qemu") - ret.add_argument("--cmdline", help="override kernel commandline") - ret.add_argument("--image-size", default="4G", - help="set rootfs size, e.g. 2048M or 2G (default: 4G)") - ret.add_argument("--second-storage", metavar="IMAGE_SIZE", - help="add a second storage with the given size (default:" - " 4G), gets created if it does not exist. Use to" - " test install from SD to eMMC", - nargs="?", default=None, const="4G") - ret.add_argument("-m", "--memory", type=int, default=1024, - help="guest RAM (default: 1024)") - ret.add_argument("-p", "--port", type=int, default=2222, - help="SSH port (default: 2222)") - - ret.add_argument("--no-kvm", dest="qemu_kvm", default=True, - action='store_false', help="Avoid using hardware-assisted" - " virtualization with KVM even when available (SLOW!)") - ret.add_argument("--cpu", dest="qemu_cpu", - help="Override emulated QEMU CPU. By default, the host" - " CPU will be emulated when using KVM and the QEMU" - " default otherwise (usually a CPU with minimal" - " features). A useful value is 'max' (emulate all" - " features that are available), use --cpu help to get a" - " list of possible values from QEMU.") - - ret.add_argument("--tablet", dest="qemu_tablet", action='store_true', - default=False, help="Use 'tablet' instead of 'mouse'" - " input for QEMU. The tablet input device automatically" - " grabs/releases the mouse when moving in/out of the QEMU" - " window. 
(NOTE: For some reason the mouse position is" - " not reported correctly with this in some cases...)") - - ret.add_argument("--display", dest="qemu_display", - choices=["sdl", "gtk", "none"], - help="QEMU's display parameter (default: gtk,gl=on)", - default="gtk", nargs="?") - ret.add_argument("--no-gl", dest="qemu_gl", default=True, - action='store_false', help="Avoid using GL for" - " accelerating graphics in QEMU (use software" - " rasterizer, slow!)") - ret.add_argument("--video", dest="qemu_video", default="1024x768@60", - help="Video resolution for QEMU" - " (WidthxHeight@RefreshRate). Default is 1024x768@60.") - - ret.add_argument("--audio", dest="qemu_audio", - choices=["alsa", "pa", "sdl"], - help="QEMU's audio backend (default: none)", - default=None, nargs="?") - - ret.add_argument("--host-qemu", dest="host_qemu", action='store_true', - help="Use the host system's qemu") - - ret.add_argument("--efi", action="store_true", - help="Use EFI boot (default: direct kernel image boot)") - return ret - - -def arguments_pkgrel_bump(subparser): - ret = subparser.add_parser("pkgrel_bump", help="increase the pkgrel to" - " indicate that a package must be rebuilt" - " because of a dependency change") - ret.add_argument("--dry", action="store_true", help="instead of modifying" - " APKBUILDs, exit with >0 when a package would have been" - " bumped") - - # Mutually exclusive: "--auto" or package names - mode = ret.add_mutually_exclusive_group(required=True) - mode.add_argument("--auto", action="store_true", help="all packages which" - " depend on a library which had an incompatible update" - " (libraries with a soname bump)") - mode.add_argument("packages", nargs="*", default=[]) - return ret - - -def arguments_aportupgrade(subparser): - ret = subparser.add_parser("aportupgrade", help="check for outdated" - " packages that need upgrading") - ret.add_argument("--dry", action="store_true", help="instead of modifying" - " APKBUILDs, print the changes that would be made") - ret.add_argument("--ref", help="git ref (tag, commit, etc) to use") - - # Mutually exclusive: "--all" or package names - mode = ret.add_mutually_exclusive_group(required=True) - mode.add_argument("--all", action="store_true", help="iterate through all" - " packages") - mode.add_argument("--all-stable", action="store_true", help="iterate" - " through all non-git packages") - mode.add_argument("--all-git", action="store_true", help="iterate through" - " all git packages") - mode.add_argument("packages", nargs="*", default=[]) - return ret - - -def arguments_newapkbuild(subparser): - """ - Wrapper for Alpine's "newapkbuild" command. - - Most parameters will get directly passed through, and they are defined in - "pmb/config/__init__.py". That way they can be used here and when passing - them through in "pmb/helpers/frontend.py". The order of the parameters is - kept the same as in "newapkbuild -h". - """ - sub = subparser.add_parser("newapkbuild", help="get a template to package" - " new software") - sub.add_argument("--folder", help="set postmarketOS aports folder" - " (default: main)", default="main") - - # Passthrough: Strings (e.g. -d "my description") - for entry in pmb.config.newapkbuild_arguments_strings: - sub.add_argument(entry[0], dest=entry[1], help=entry[2]) - - # Passthrough: Package type switches (e.g. 
-C for CMake) - group = sub.add_mutually_exclusive_group() - for entry in pmb.config.newapkbuild_arguments_switches_pkgtypes: - group.add_argument(entry[0], dest=entry[1], help=entry[2], - action="store_true") - - # Passthrough: Other switches (e.g. -c for copying sample files) - for entry in pmb.config.newapkbuild_arguments_switches_other: - sub.add_argument(entry[0], dest=entry[1], help=entry[2], - action="store_true") - - # Force switch - sub.add_argument("-f", dest="force", action="store_true", - help="force even if directory already exists") - - # Passthrough: PKGNAME[-PKGVER] | SRCURL - sub.add_argument("pkgname_pkgver_srcurl", - metavar="PKGNAME[-PKGVER] | SRCURL", - help="set either the package name (optionally with the" - " PKGVER at the end, e.g. 'hello-world-1.0') or the" - " download link to the source archive") - - -def arguments_kconfig(subparser): - # Allowed architectures - arch_native = pmb.config.arch_native - arch_choices = set(pmb.config.build_device_architectures + [arch_native]) - - # Kconfig subparser - ret = subparser.add_parser("kconfig", help="change or edit kernel configs") - sub = ret.add_subparsers(dest="action_kconfig") - sub.required = True - - # "pmbootstrap kconfig check" - check = sub.add_parser("check", help="check kernel aport config") - check.add_argument("-f", "--force", action="store_true", help="check all" - " kernels, even the ones that would be ignored by" - " default") - check.add_argument("--arch", choices=arch_choices, dest="arch") - check.add_argument("--file", help="check a file directly instead of a" - " config in a package") - check.add_argument("--no-details", action="store_false", - dest="kconfig_check_details", - help="print one generic error per component instead of" - " listing each option that needs to be adjusted") - for name in pmb.parse.kconfig.get_all_component_names(): - check.add_argument(f"--{name}", action="store_true", - dest=f"kconfig_check_{name}", - help=f"check options needed for {name} too") - add_kernel_arg(check, nargs="*") - - # "pmbootstrap kconfig edit" - edit = sub.add_parser("edit", help="edit kernel aport config") - edit.add_argument("--arch", choices=arch_choices, dest="arch") - edit.add_argument("-x", dest="xconfig", action="store_true", - help="use xconfig rather than menuconfig for kernel" - " configuration") - edit.add_argument("-n", dest="nconfig", action="store_true", - help="use nconfig rather than menuconfig for kernel" - " configuration") - add_kernel_arg(edit) - - # "pmbootstrap kconfig migrate" - migrate = sub.add_parser("migrate", - help="Migrate kconfig from older version to " - "newer. 
Internally runs 'make oldconfig', " - "which asks question for every new kernel " - "config option.") - migrate.add_argument("--arch", choices=arch_choices, dest="arch") - add_kernel_arg(migrate) - - -def arguments_repo_missing(subparser): - ret = subparser.add_parser("repo_missing") - package = ret.add_argument("package", nargs="?", help="only look at a" - " specific package and its dependencies") - if argcomplete: - package.completer = package_completer - ret.add_argument("--arch", choices=pmb.config.build_device_architectures, - default=pmb.config.arch_native) - ret.add_argument("--built", action="store_true", - help="include packages which exist in the binary repos") - ret.add_argument("--overview", action="store_true", - help="only print the pkgnames without any details") - return ret - - -def arguments_lint(subparser): - lint = subparser.add_parser("lint", help="run quality checks on pmaports" - " (required to pass CI)") - add_packages_arg(lint, nargs="*") - - -def arguments_status(subparser): - ret = subparser.add_parser("status", - help="quick health check for the work dir") - ret.add_argument("--details", action="store_true", - help="list passing checks in detail, not as summary") - return ret - - -def arguments_netboot(subparser): - ret = subparser.add_parser("netboot", - help="launch nbd server with pmOS rootfs") - sub = ret.add_subparsers(dest="action_netboot") - sub.required = True - - start = sub.add_parser("serve", help="start nbd server") - start.add_argument("--replace", action="store_true", - help="replace stored netboot image") - - return ret - - -def arguments_ci(subparser): - ret = subparser.add_parser("ci", help="run continuous integration scripts" - " locally of git repo in current" - " directory") - script_args = ret.add_mutually_exclusive_group() - script_args.add_argument("-a", "--all", action="store_true", - help="run all scripts") - script_args.add_argument("-f", "--fast", action="store_true", - help="run fast scripts only") - ret.add_argument("scripts", nargs="*", metavar="script", - help="name of the CI script to run, depending on the git" - " repository") - return ret - - -def package_completer(prefix, action, parser=None, parsed_args=None): - args = parsed_args - pmb.config.merge_with_args(args) - pmb.helpers.args.replace_placeholders(args) - pmb.helpers.other.init_cache() - packages = set( - package for package in pmb.helpers.pmaports.get_list(args) - if package.startswith(prefix)) - return packages - - -def kernel_completer(prefix, action, parser=None, parsed_args=None): - """ :returns: matched linux-* packages, with linux-* prefix and without """ - ret = [] - - # Full package name, starting with "linux-" - if (len("linux-") < len(prefix) and prefix.startswith("linux-") or - "linux-".startswith(prefix)): - ret += package_completer(prefix, action, parser, parsed_args) - - # Kernel name without "linux-" - packages = package_completer(f"linux-{prefix}", action, parser, - parsed_args) - ret += [package.replace("linux-", "", 1) for package in packages] - - return ret - - -def add_packages_arg(subparser, name="packages", *args, **kwargs): - arg = subparser.add_argument(name, *args, **kwargs) - if argcomplete: - arg.completer = package_completer - - -def add_kernel_arg(subparser, name="package", nargs="?", *args, **kwargs): - arg = subparser.add_argument("package", nargs=nargs, help="kernel package" - " (e.g. 
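# A standalone, simplified sketch of the completion logic above: offer
# both the full "linux-*" pkgname and the short form without the prefix.
# The package list is a stand-in for the real pmaports list, and the
# corner cases kernel_completer() handles (e.g. a prefix that is itself a
# prefix of "linux-") are left out:
def complete_kernel(prefix, packages):
    ret = [p for p in packages if p.startswith(prefix)]        # full names
    ret += [p[len("linux-"):] for p in packages                # short names
            if p.startswith("linux-" + prefix)]
    return sorted(set(ret))


pkgs = ["linux-postmarketos-allwinner", "linux-postmarketos-qcom-sdm845",
        "hello-world"]
assert complete_kernel("postmarketos-q", pkgs) == ["postmarketos-qcom-sdm845"]
assert complete_kernel("linux-postmarketos-a", pkgs) == \
    ["linux-postmarketos-allwinner"]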
linux-postmarketos-allwinner)") - if argcomplete: - arg.completer = kernel_completer - - -def arguments(): - parser = argparse.ArgumentParser(prog="pmbootstrap") - arch_native = pmb.config.arch_native - arch_choices = set(pmb.config.build_device_architectures + [arch_native]) - mirrors_pmos_default = pmb.config.defaults["mirrors_postmarketos"] - - # Other - parser.add_argument("-V", "--version", action="version", - version=pmb.__version__) - parser.add_argument("-c", "--config", dest="config", - default=pmb.config.defaults["config"], - help="path to pmbootstrap.cfg file (default in" - " ~/.config/)") - parser.add_argument("--config-channels", - help="path to channels.cfg (which is by default" - " read from pmaports.git, origin/master branch)") - parser.add_argument("-mp", "--mirror-pmOS", dest="mirrors_postmarketos", - help="postmarketOS mirror, disable with: -mp=''," - " specify multiple with: -mp='one' -mp='two'," - f" default: {mirrors_pmos_default}", - metavar="URL", action="append", default=[]) - parser.add_argument("-m", "--mirror-alpine", dest="mirror_alpine", - help="Alpine Linux mirror, default: " + - pmb.config.defaults["mirror_alpine"], - metavar="URL") - parser.add_argument("-j", "--jobs", help="parallel jobs when compiling") - parser.add_argument("-E", "--extra-space", - help="specify an integer with the amount of additional" - "space to allocate to the image in MB (default" - " 0)") - parser.add_argument("-B", "--boot-size", - help="specify an integer with your preferred boot" - "partition size on target machine in MB (default" - " 128)") - parser.add_argument("-p", "--aports", - help="postmarketos aports (pmaports) path") - parser.add_argument("-t", "--timeout", help="seconds after which processes" - " get killed that stopped writing any output (default:" - " 900)", default=900, type=float) - parser.add_argument("-w", "--work", help="folder where all data" - " gets stored (chroots, caches, built packages)") - parser.add_argument("-y", "--assume-yes", help="Assume 'yes' to all" - " question prompts. WARNING: this option will" - " cause normal 'are you sure?' prompts to be" - " disabled!", - action="store_true") - parser.add_argument("--as-root", help="Allow running as root (not" - " recommended, may screw up your work folders" - " directory permissions!)", dest="as_root", - action="store_true") - parser.add_argument("-o", "--offline", help="Do not attempt to update" - " the package index files", action="store_true") - - # Compiler - parser.add_argument("--no-ccache", action="store_false", - dest="ccache", help="do not cache the compiled output") - parser.add_argument("--no-cross", action="store_false", dest="cross", - help="disable cross compiler, build only with QEMU and" - " gcc (slow!)") - - # Logging - parser.add_argument("-l", "--log", dest="log", default=None, - help="path to log file") - parser.add_argument("--details-to-stdout", dest="details_to_stdout", - help="print details (e.g. 
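# A standalone sketch of the repeatable -mp/--mirror-pmOS flag above:
# action="append" collects every occurrence into one list, and (per the
# help text) an empty value is how a user disables the pmOS mirrors; the
# sketch only shows what ends up in args (URLs below are made up):
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-mp", "--mirror-pmOS", dest="mirrors_postmarketos",
                    metavar="URL", action="append", default=[])

assert parser.parse_args([]).mirrors_postmarketos == []
assert parser.parse_args(
    ["-mp", "http://a.example", "-mp", "http://b.example"]
).mirrors_postmarketos == ["http://a.example", "http://b.example"]
assert parser.parse_args(["-mp", ""]).mirrors_postmarketos == [""]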
build output) to stdout," - " instead of writing to the log", - action="store_true") - parser.add_argument("-v", "--verbose", dest="verbose", - action="store_true", help="write even more to the" - " logfiles (this may reduce performance)") - parser.add_argument("-q", "--quiet", dest="quiet", action="store_true", - help="do not output any log messages") - - # Actions - sub = parser.add_subparsers(title="action", dest="action") - sub.add_parser("init", help="initialize config file") - sub.add_parser("shutdown", help="umount, unregister binfmt") - sub.add_parser("index", help="re-index all repositories with custom built" - " packages (do this after manually removing package files)") - sub.add_parser("work_migrate", help="run this before using pmbootstrap" - " non-interactively to migrate the" - " work folder version on demand") - arguments_repo_missing(sub) - arguments_kconfig(sub) - arguments_export(sub) - arguments_sideload(sub) - arguments_netboot(sub) - arguments_flasher(sub) - arguments_initfs(sub) - arguments_qemu(sub) - arguments_pkgrel_bump(sub) - arguments_aportupgrade(sub) - arguments_newapkbuild(sub) - arguments_lint(sub) - arguments_status(sub) - arguments_ci(sub) - - # Action: log - log = sub.add_parser("log", help="follow the pmbootstrap logfile") - log.add_argument("-n", "--lines", default="60", - help="count of initial output lines") - log.add_argument("-c", "--clear", help="clear the log", - action="store_true", dest="clear_log") - - # Action: zap - zap = sub.add_parser("zap", help="safely delete chroot folders") - zap.add_argument("--dry", action="store_true", help="instead of actually" - " deleting anything, print out what would have been" - " deleted") - zap.add_argument("-hc", "--http", action="store_true", help="also delete" - " http cache") - zap.add_argument("-d", "--distfiles", action="store_true", help="also" - " delete downloaded source tarballs") - zap.add_argument("-p", "--pkgs-local", action="store_true", - dest="pkgs_local", - help="also delete *all* locally compiled packages") - zap.add_argument("-m", "--pkgs-local-mismatch", action="store_true", - dest="pkgs_local_mismatch", - help="also delete locally compiled packages without" - " existing aport of same version") - zap.add_argument("-n", "--netboot", action="store_true", - help="also delete stored images for netboot") - zap.add_argument("-o", "--pkgs-online-mismatch", action="store_true", - dest="pkgs_online_mismatch", - help="also delete outdated packages from online mirrors" - " (that have been downloaded to the apk cache)") - zap.add_argument("-r", "--rust", action="store_true", - help="also delete rust related caches") - - zap_all_delete_args = ["http", "distfiles", "pkgs_local", - "pkgs_local_mismatch", "netboot", "pkgs_online_mismatch", - "rust"] - zap_all_delete_args_print = [arg.replace("_", "-") - for arg in zap_all_delete_args] - zap.add_argument("-a", "--all", - action=toggle_other_boolean_flags(*zap_all_delete_args), - help="delete everything, equivalent to: " - f"--{' --'.join(zap_all_delete_args_print)}") - - # Action: stats - stats = sub.add_parser("stats", help="show ccache stats") - stats.add_argument("--arch", default=arch_native, choices=arch_choices) - - # Action: update - update = sub.add_parser("update", help="update all existing APKINDEX" - " files") - update.add_argument("--arch", default=None, choices=arch_choices, - help="only update a specific architecture") - update.add_argument("--non-existing", action="store_true", help="do not" - " only update the existing APKINDEX files, but 
all of" - " them", dest="non_existing") - - # Action: build_init / chroot - build_init = sub.add_parser("build_init", help="initialize build" - " environment (usually you do not need to call" - " this)") - chroot = sub.add_parser("chroot", help="start shell in chroot") - chroot.add_argument("--add", help="build/install comma separated list of" - " packages in the chroot before entering it") - chroot.add_argument("--user", help="run the command as user, not as root", - action="store_true") - chroot.add_argument("--output", choices=["log", "stdout", "interactive", - "tui", "background"], help="how the output of the" - " program should be handled, choose from: 'log'," - " 'stdout', 'interactive', 'tui' (default)," - " 'background'. Details: pmb/helpers/run_core.py", - default="tui") - chroot.add_argument("command", default=["sh", "-i"], help="command" - " to execute inside the chroot. default: sh", - nargs='*') - chroot.add_argument("-x", "--xauth", action="store_true", - help="Copy .Xauthority and set environment variables," - " so X11 applications can be started (native" - " chroot only)") - chroot.add_argument("-i", "--install-blockdev", action="store_true", - help="Create a sparse image file and mount it as" - " /dev/install, just like during the" - " installation process.") - for action in [build_init, chroot]: - suffix = action.add_mutually_exclusive_group() - if action == chroot: - suffix.add_argument("-r", "--rootfs", action="store_true", - help="Chroot for the device root file system") - suffix.add_argument("-b", "--buildroot", nargs="?", const="device", - choices={"device"} | arch_choices, - help="Chroot for building packages, defaults to" - " device architecture") - suffix.add_argument("-s", "--suffix", default=None, - help="Specify any chroot suffix, defaults to" - " 'native'") - - # Action: install - arguments_install(sub) - - # Action: checksum - checksum = sub.add_parser("checksum", help="update aport checksums") - checksum.add_argument("--verify", action="store_true", help="download" - " sources and verify that the checksums of the" - " APKBUILD match, instead of updating them") - add_packages_arg(checksum, nargs="+") - - # Action: aportgen - aportgen = sub.add_parser("aportgen", help="generate a postmarketOS" - " specific package build recipe" - " (aport/APKBUILD)") - aportgen.add_argument("--fork-alpine", help="fork the alpine upstream" - " package", action="store_true", - dest="fork_alpine") - add_packages_arg(aportgen, nargs="+") - - # Action: build - build = sub.add_parser("build", help="create a package for a" - " specific architecture") - build.add_argument("--arch", choices=arch_choices, default=None, - help="CPU architecture to build for (default: " + - arch_native + " or first available architecture in" - " APKBUILD)") - build.add_argument("--force", action="store_true", help="even build if not" - " necessary") - build.add_argument("--strict", action="store_true", help="(slower) zap and" - " install only required depends when building, to" - " detect dependency errors") - build.add_argument("--src", help="override source used to build the" - " package with a local folder (the APKBUILD must" - " expect the source to be in $builddir, so you might" - " need to adjust it)", - nargs=1) - build.add_argument("-i", "--ignore-depends", action="store_true", - help="only build and install makedepends from an" - " APKBUILD, ignore the depends (old behavior). This is" - " faster for device packages for example, because then" - " you don't need to build and install the kernel. 
But" - " it is incompatible with how Alpine's abuild handles" - " it.", - dest="ignore_depends") - build.add_argument("-n", "--no-depends", action="store_true", - help="never build dependencies, abort instead", - dest="no_depends") - build.add_argument("--go-mod-cache", action="store_true", default=None, - help="for go packages: Usually they should bundle the" - " dependency sources instead of downloading them" - " at build time. But if they don't (e.g. with" - " pmbootstrap build --src), then this option can" - " be used to let GOMODCACHE point into" - " pmbootstrap's work dir to only download" - " dependencies once. (default: true with --src," - " false otherwise)") - build.add_argument("--no-go-mod-cache", - action="store_false", dest="go_mod_cache", default=None, - help="don't set GOMODCACHE") - build.add_argument("--envkernel", action="store_true", - help="Create an apk package from the build output of" - " a kernel compiled locally on the host or with envkernel.sh.") - add_packages_arg(build, nargs="+") - - # Action: deviceinfo_parse - deviceinfo_parse = sub.add_parser("deviceinfo_parse") - deviceinfo_parse.add_argument("devices", nargs="*") - deviceinfo_parse.add_argument("--kernel", help="the kernel to select (for" - " device packages with multiple kernels)," - " e.g. 'downstream', 'mainline'", - dest="deviceinfo_parse_kernel", - metavar="KERNEL") - - # Action: apkbuild_parse - apkbuild_parse = sub.add_parser("apkbuild_parse") - add_packages_arg(apkbuild_parse, nargs="*") - - # Action: apkindex_parse - apkindex_parse = sub.add_parser("apkindex_parse") - apkindex_parse.add_argument("apkindex_path") - add_packages_arg(apkindex_parse, "package", nargs="?") - - # Action: config - config = sub.add_parser("config", - help="get and set pmbootstrap options") - config.add_argument("-r", "--reset", action="store_true", - help="Reset config options with the given name to it's" - " default.") - config.add_argument("name", nargs="?", help="variable name, one of: " + - ", ".join(sorted(pmb.config.config_keys)), - choices=pmb.config.config_keys, metavar="name") - config.add_argument("value", nargs="?", help="set variable to value") - - # Action: bootimg_analyze - bootimg_analyze = sub.add_parser("bootimg_analyze", help="Extract all the" - " information from an existing boot.img") - bootimg_analyze.add_argument("path", help="path to the boot.img") - bootimg_analyze.add_argument("--force", "-f", action="store_true", - help="force even if the file seems to be" - " invalid") - - # Action: pull - sub.add_parser("pull", help="update all git repositories that pmbootstrap" - " cloned (pmaports, etc.)") - - if argcomplete: - argcomplete.autocomplete(parser, always_complete_options="long") - - # Parse and extend arguments (also backup unmodified result from argparse) - args = parser.parse_args() - setattr(args, "from_argparse", copy.deepcopy(args)) - setattr(args.from_argparse, "from_argparse", args.from_argparse) - pmb.helpers.args.init(args) - - if getattr(args, "go_mod_cache", None) is None: - gomodcache = True if getattr(args, "src", None) else False - setattr(args, "go_mod_cache", gomodcache) - - return args diff --git a/pmb/parse/binfmt_info.py b/pmb/parse/binfmt_info.py deleted file mode 100644 index a50861cb..00000000 --- a/pmb/parse/binfmt_info.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import pmb.config - -# Get magic and mask from binfmt info file -# Return: {magic: ..., mask: ...} - - -def 
binfmt_info(arch_qemu): - # Parse the info file - full = {} - info = pmb.config.pmb_src + "/pmb/data/qemu-user-binfmt.txt" - logging.verbose("parsing: " + info) - with open(info, "r") as handle: - for line in handle: - if line.startswith('#') or "=" not in line: - continue - split = line.split("=") - key = split[0].strip() - value = split[1] - full[key] = value[1:-2] - - ret = {} - logging.verbose("filtering by architecture: " + arch_qemu) - for type in ["mask", "magic"]: - key = arch_qemu + "_" + type - if key not in full: - raise RuntimeError( - f"Could not find key {key} in binfmt info file: {info}") - ret[type] = full[key] - logging.verbose("=> " + str(ret)) - return ret diff --git a/pmb/parse/bootimg.py b/pmb/parse/bootimg.py deleted file mode 100644 index 02b0f234..00000000 --- a/pmb/parse/bootimg.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import logging -import pmb - - -def is_dtb(path): - if not os.path.isfile(path): - return False - with open(path, 'rb') as f: - # Check FDT magic identifier (0xd00dfeed) - return f.read(4) == b'\xd0\x0d\xfe\xed' - - -def get_mtk_label(path): - """ Read the label from the MediaTek header of the kernel or ramdisk inside - an extracted boot.img. - :param path: to either the kernel or ramdisk extracted from boot.img - :returns: * None: file does not exist or does not have MediaTek header - * Label string (e.g. "ROOTFS", "KERNEL") """ - if not os.path.exists(path): - return None - - with open(path, 'rb') as f: - # Check Mediatek header (0x88168858) - if not f.read(4) == b'\x88\x16\x88\x58': - return None - f.seek(8) - label = f.read(32).decode("utf-8").rstrip('\0') - - if label == "RECOVERY": - logging.warning( - "WARNING: This boot.img has MediaTek headers. Since you passed a" - " recovery image instead of a regular boot.img, we can't tell what" - " the ramdisk signature label is supposed to be, so we assume that" - " it's the most common value, ROOTFS. There is a chance that this" - " is wrong and it may not boot; in that case, run bootimg_analyze" - " again with a regular boot.img. 
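# A standalone sketch of the magic-byte checks above, using in-memory
# buffers instead of files extracted from a real boot.img: flattened
# device trees start with 0xd00dfeed, MediaTek-wrapped kernels/ramdisks
# start with 0x88168858 and carry a 32-byte label at offset 8 (payloads
# below are made up):
import io

dtb = io.BytesIO(b"\xd0\x0d\xfe\xed" + b"\x00" * 28)
mtk = io.BytesIO(b"\x88\x16\x88\x58" + b"\x00" * 4 +
                 b"ROOTFS".ljust(32, b"\x00"))

assert dtb.read(4) == b"\xd0\x0d\xfe\xed"      # what is_dtb() checks
assert mtk.read(4) == b"\x88\x16\x88\x58"      # MediaTek header magic
mtk.seek(8)
assert mtk.read(32).decode("utf-8").rstrip("\0") == "ROOTFS"   # label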
If this *is* a regular boot.img," - " replace the value of deviceinfo_bootimg_mtk_label_ramdisk with" - " 'RECOVERY'.") - return "ROOTFS" - else: - return label - - -def bootimg(args, path): - if not os.path.exists(path): - raise RuntimeError("Could not find file '" + path + "'") - - logging.info("NOTE: You will be prompted for your sudo/doas password, so" - " we can set up a chroot to extract and analyze your" - " boot.img file") - pmb.chroot.apk.install(args, ["file", "unpackbootimg"]) - - temp_path = pmb.chroot.other.tempfolder(args, "/tmp/bootimg_parser") - bootimg_path = f"{args.work}/chroot_native{temp_path}/boot.img" - - # Copy the boot.img into the chroot temporary folder - # and make it world readable - pmb.helpers.run.root(args, ["cp", path, bootimg_path]) - pmb.helpers.run.root(args, ["chmod", "a+r", bootimg_path]) - - file_output = pmb.chroot.user(args, ["file", "-b", "boot.img"], - working_dir=temp_path, - output_return=True).rstrip() - if "android bootimg" not in file_output.lower(): - if "force" in args and args.force: - logging.warning("WARNING: boot.img file seems to be invalid, but" - " proceeding anyway (-f specified)") - else: - logging.info("NOTE: If you are sure that your file is a valid" - " boot.img file, you could force the analysis" - " with: 'pmbootstrap bootimg_analyze " + path + - " -f'") - if ("linux kernel" in file_output.lower() or - "ARM OpenFirmware FORTH Dictionary" in file_output): - raise RuntimeError("File is a Kernel image, you might need the" - " 'heimdall-isorec' flash method. See also:" - " ") - else: - raise RuntimeError("File is not an Android boot.img. (" + - file_output + ")") - - # Extract all the files - pmb.chroot.user(args, ["unpackbootimg", "-i", "boot.img"], - working_dir=temp_path) - - output = {} - header_version = 0 - # Get base, offsets, pagesize, cmdline and qcdt info - # This file does not exist for example for qcdt images - if os.path.isfile(f"{bootimg_path}-header_version"): - with open(f"{bootimg_path}-header_version", 'r') as f: - header_version = int(f.read().replace('\n', '')) - output["header_version"] = str(header_version) - - if header_version >= 3: - output["pagesize"] = "4096" - else: - with open(f"{bootimg_path}-base", 'r') as f: - output["base"] = ("0x%08x" % int(f.read().replace('\n', ''), 16)) - with open(f"{bootimg_path}-kernel_offset", 'r') as f: - output["kernel_offset"] = ("0x%08x" - % int(f.read().replace('\n', ''), 16)) - with open(f"{bootimg_path}-ramdisk_offset", 'r') as f: - output["ramdisk_offset"] = ("0x%08x" - % int(f.read().replace('\n', ''), 16)) - with open(f"{bootimg_path}-second_offset", 'r') as f: - output["second_offset"] = ("0x%08x" - % int(f.read().replace('\n', ''), 16)) - with open(f"{bootimg_path}-tags_offset", 'r') as f: - output["tags_offset"] = ("0x%08x" - % int(f.read().replace('\n', ''), 16)) - with open(f"{bootimg_path}-pagesize", 'r') as f: - output["pagesize"] = f.read().replace('\n', '') - - if header_version == 2: - with open(f"{bootimg_path}-dtb_offset", 'r') as f: - output["dtb_offset"] = ("0x%08x" - % int(f.read().replace('\n', ''), 16)) - - if get_mtk_label(f"{bootimg_path}-kernel") is not None: - output["mtk_label_kernel"] = get_mtk_label(f"{bootimg_path}-kernel") - if get_mtk_label(f"{bootimg_path}-ramdisk") is not None: - output["mtk_label_ramdisk"] = get_mtk_label(f"{bootimg_path}-ramdisk") - - output["qcdt"] = ("true" if os.path.isfile(f"{bootimg_path}-dt") and - os.path.getsize(f"{bootimg_path}-dt") > 0 else "false") - - output["dtb_second"] = ("true" if 
is_dtb(f"{bootimg_path}-second") - else "false") - - with open(f"{bootimg_path}-cmdline", 'r') as f: - output["cmdline"] = f.read().replace('\n', '') - - # Cleanup - pmb.chroot.root(args, ["rm", "-r", temp_path]) - - return output diff --git a/pmb/parse/cpuinfo.py b/pmb/parse/cpuinfo.py deleted file mode 100644 index 33ff17fb..00000000 --- a/pmb/parse/cpuinfo.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2023 Lary Gibaud -# SPDX-License-Identifier: GPL-3.0-or-later -import re - - -def arm_big_little_first_group_ncpus(): - """ - Infer from /proc/cpuinfo on aarch64 if this is a big/little architecture - (if there is different processor models) and the number of cores in the - first model group. - https://en.wikipedia.org/wiki/ARM_big.LITTLE - - :returns: the number of cores of the first model in the order given by - linux or None if not big/little architecture - """ - pattern = re.compile(r"^CPU part\s*: (\w+)$") - counter = 0 - part = None - - with open('/proc/cpuinfo', 'r') as cpuinfo: - for line in cpuinfo: - match = pattern.match(line) - if match: - grp = match.group(1) - if not part: - part = grp - counter += 1 - elif part == grp: - counter += 1 - else: - return counter - return None diff --git a/pmb/parse/depends.py b/pmb/parse/depends.py deleted file mode 100644 index cb487ca1..00000000 --- a/pmb/parse/depends.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import pmb.chroot -import pmb.chroot.apk -import pmb.helpers.pmaports -import pmb.parse.apkindex -import pmb.parse.arch - - -def package_from_aports(args, pkgname_depend): - """ - :returns: None when there is no aport, or a dict with the keys pkgname, - depends, version. The version is the combined pkgver and pkgrel. - """ - # Get the aport - aport = pmb.helpers.pmaports.find(args, pkgname_depend, False) - if not aport: - return None - - # Parse its version - apkbuild = pmb.parse.apkbuild(f"{aport}/APKBUILD") - pkgname = apkbuild["pkgname"] - version = apkbuild["pkgver"] + "-r" + apkbuild["pkgrel"] - - # Return the dict - logging.verbose( - f"{pkgname_depend}: provided by: {pkgname}-{version} in {aport}") - return {"pkgname": pkgname, - "depends": apkbuild["depends"], - "version": version} - - -def package_provider(args, pkgname, pkgnames_install, suffix="native"): - """ - :param pkgnames_install: packages to be installed - :returns: a block from the apkindex: {"pkgname": "...", ...} - or None (no provider found) - """ - # Get all providers - arch = pmb.parse.arch.from_chroot_suffix(args, suffix) - providers = pmb.parse.apkindex.providers(args, pkgname, arch, False) - - # 0. No provider - if len(providers) == 0: - return None - - # 1. Only one provider - logging.verbose(f"{pkgname}: provided by: {', '.join(providers)}") - if len(providers) == 1: - return list(providers.values())[0] - - # 2. Provider with the same package name - if pkgname in providers: - logging.verbose(f"{pkgname}: choosing package of the same name as " - "provider") - return providers[pkgname] - - # 3. Pick a package that will be installed anyway - for provider_pkgname, provider in providers.items(): - if provider_pkgname in pkgnames_install: - logging.verbose(f"{pkgname}: choosing provider '{provider_pkgname}" - "', because it will be installed anyway") - return provider - - # 4. 
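# A standalone sketch of the big.LITTLE detection above, run on an inline
# /proc/cpuinfo excerpt instead of the real file (four Cortex-A53 cores
# followed by four Cortex-A72 cores, so the first group has 4 CPUs):
import re

cpuinfo = "\n".join(["CPU part\t: 0xd03"] * 4 + ["CPU part\t: 0xd08"] * 4)

pattern = re.compile(r"^CPU part\s*: (\w+)$")
counter, part = 0, None
for line in cpuinfo.splitlines():
    match = pattern.match(line)
    if not match:
        continue
    if part in (None, match.group(1)):
        part = match.group(1)
        counter += 1
    else:
        break

assert counter == 4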
Pick a package that is already installed - installed = pmb.chroot.apk.installed(args, suffix) - for provider_pkgname, provider in providers.items(): - if provider_pkgname in installed: - logging.verbose(f"{pkgname}: choosing provider '{provider_pkgname}" - f"', because it is installed in the '{suffix}' " - "chroot already") - return provider - - # 5. Pick an explicitly selected provider - provider_pkgname = args.selected_providers.get(pkgname, "") - if provider_pkgname in providers: - logging.verbose(f"{pkgname}: choosing provider '{provider_pkgname}', " - "because it was explicitly selected.") - return providers[provider_pkgname] - - # 6. Pick the provider(s) with the highest priority - providers = pmb.parse.apkindex.provider_highest_priority( - providers, pkgname) - if len(providers) == 1: - return list(providers.values())[0] - - # 7. Pick the shortest provider. (Note: Normally apk would fail here!) - return pmb.parse.apkindex.provider_shortest(providers, pkgname) - - -def package_from_index(args, pkgname_depend, pkgnames_install, package_aport, - suffix="native"): - """ - :returns: None when there is no aport and no binary package, or a dict with - the keys pkgname, depends, version from either the aport or the - binary package provider. - """ - # No binary package - provider = package_provider(args, pkgname_depend, pkgnames_install, suffix) - if not provider: - return package_aport - - # Binary package outdated - if (package_aport and pmb.parse.version.compare(package_aport["version"], - provider["version"]) == 1): - logging.verbose(pkgname_depend + ": binary package is outdated") - return package_aport - - # Binary up to date (#893: overrides aport, so we have sonames in depends) - if package_aport: - logging.verbose(pkgname_depend + ": binary package is" - " up to date, using binary dependencies" - " instead of the ones from the aport") - return provider - - -def recurse(args, pkgnames, suffix="native"): - """ - Find all dependencies of the given pkgnames. - - :param suffix: the chroot suffix to resolve dependencies for. If a package - has multiple providers, we look at the installed packages in - the chroot to make a decision (see package_provider()). - :returns: list of pkgnames: consists of the initial pkgnames plus all - depends. Dependencies explicitly marked as conflicting are - prefixed with !. - """ - logging.debug(f"({suffix}) calculate depends of {', '.join(pkgnames)} " - "(pmbootstrap -v for details)") - - # Iterate over todo-list until is is empty - todo = list(pkgnames) - required_by = {} - ret = [] - while len(todo): - # Skip already passed entries - pkgname_depend = todo.pop(0) - if pkgname_depend in ret: - continue - - # Check if the dependency is explicitly marked as conflicting - is_conflict = pkgname_depend.startswith("!") - pkgname_depend = pkgname_depend.lstrip("!") - - # Get depends and pkgname from aports - pkgnames_install = list(ret) + todo - package = package_from_aports(args, pkgname_depend) - package = package_from_index(args, pkgname_depend, pkgnames_install, - package, suffix) - - # Nothing found - if not package: - if is_conflict: - # This package was probably dropped from the repos, so we don't - # care if it doesn't exist since it's a conflicting depend that - # wouldn't be installed anyways. - continue - source = 'world' - if pkgname_depend in required_by: - source = ', '.join(required_by[pkgname_depend]) - raise RuntimeError(f"Could not find dependency '{pkgname_depend}' " - "in checked out pmaports dir or any APKINDEX. " - f"Required by '{source}'. 
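# A standalone sketch of the breadth-first dependency walk in recurse()
# above, with a hard-coded depends table standing in for the aports and
# APKINDEX lookups (package names below are made up):
depends_table = {
    "device-example": ["linux-example", "postmarketos-base"],
    "postmarketos-base": ["busybox"],
    "linux-example": [],
    "busybox": [],
}


def resolve(pkgnames):
    todo, ret = list(pkgnames), []
    while todo:
        pkgname = todo.pop(0)
        if pkgname in ret:
            continue
        todo += depends_table.get(pkgname, [])
        ret.append(pkgname)
    return ret


assert resolve(["device-example"]) == ["device-example", "linux-example",
                                       "postmarketos-base", "busybox"]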
See: " - "https://postmarketos.org/depends") - - # Determine pkgname - pkgname = package["pkgname"] - if is_conflict: - pkgname = f"!{pkgname}" - - # Append to todo/ret (unless it is a duplicate) - if pkgname in ret: - logging.verbose(f"{pkgname}: already found") - else: - if not is_conflict: - depends = package["depends"] - logging.verbose(f"{pkgname}: depends on: {','.join(depends)}") - if depends: - todo += depends - for dep in depends: - if dep not in required_by: - required_by[dep] = set() - required_by[dep].add(pkgname_depend) - ret.append(pkgname) - return ret diff --git a/pmb/parse/deviceinfo.py b/pmb/parse/deviceinfo.py deleted file mode 100644 index 39532bbf..00000000 --- a/pmb/parse/deviceinfo.py +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import copy -import logging -import os -import pmb.config -import pmb.helpers.devices - - -def sanity_check(info, path): - # Resolve path for more readable error messages - path = os.path.realpath(path) - - # Legacy errors - if "flash_methods" in info: - raise RuntimeError("deviceinfo_flash_methods has been renamed to" - " deviceinfo_flash_method. Please adjust your" - " deviceinfo file: " + path) - if "external_disk" in info or "external_disk_install" in info: - raise RuntimeError("Instead of deviceinfo_external_disk and" - " deviceinfo_external_disk_install, please use the" - " new variable deviceinfo_external_storage in your" - " deviceinfo file: " + path) - if "msm_refresher" in info: - raise RuntimeError("It is enough to specify 'msm-fb-refresher' in the" - " depends of your device's package now. Please" - " delete the deviceinfo_msm_refresher line in: " + - path) - if "flash_fastboot_vendor_id" in info: - raise RuntimeError("Fastboot doesn't allow specifying the vendor ID" - " anymore (#1830). Try removing the" - " 'deviceinfo_flash_fastboot_vendor_id' line in: " + - path + " (if you are sure that you need this, then" - " we can probably bring it back to fastboot, just" - " let us know in the postmarketOS issues!)") - if "nonfree" in info: - raise RuntimeError("deviceinfo_nonfree is unused. " - "Please delete it in: " + path) - if "dev_keyboard" in info: - raise RuntimeError("deviceinfo_dev_keyboard is unused. " - "Please delete it in: " + path) - if "date" in info: - raise RuntimeError("deviceinfo_date was replaced by deviceinfo_year. " - "Set it to the release year in: " + path) - - # "codename" is required - codename = os.path.basename(os.path.dirname(path)) - if codename.startswith("device-"): - codename = codename[7:] - if "codename" not in info or info["codename"] != codename: - raise RuntimeError(f"Please add 'deviceinfo_codename=\"{codename}\"' " - f"to: {path}") - - # "chassis" is required - chassis_types = pmb.config.deviceinfo_chassis_types - if "chassis" not in info or not info["chassis"]: - logging.info("NOTE: the most commonly used chassis types in" - " postmarketOS are 'handset' (for phones) and 'tablet'.") - raise RuntimeError(f"Please add 'deviceinfo_chassis' to: {path}") - - # "arch" is required - if "arch" not in info or not info["arch"]: - raise RuntimeError(f"Please add 'deviceinfo_arch' to: {path}") - - # "chassis" validation - chassis_type = info["chassis"] - if chassis_type not in chassis_types: - raise RuntimeError(f"Unknown chassis type '{chassis_type}', should" - f" be one of {', '.join(chassis_types)}. 
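# A standalone sketch of the codename check above: the deviceinfo codename
# must match the aport directory name minus the "device-" prefix (the path
# below is made up for illustration):
import os

path = "/tmp/pmaports/device/community/device-example-bananapi/deviceinfo"
codename = os.path.basename(os.path.dirname(path))
if codename.startswith("device-"):
    codename = codename[len("device-"):]

assert codename == "example-bananapi"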
Fix this" - f" and try again: {path}") - - -def _parse_kernel_suffix(args, info, device, kernel): - """ - Remove the kernel suffix (as selected in 'pmbootstrap init') from - deviceinfo variables. Related: - https://wiki.postmarketos.org/wiki/Device_specific_package#Multiple_kernels - - :param info: deviceinfo dict, e.g.: - {"a": "first", - "b_mainline": "second", - "b_downstream": "third"} - :param device: which device info belongs to - :param kernel: which kernel suffix to remove (e.g. "mainline") - :returns: info, but with the configured kernel suffix removed, e.g: - {"a": "first", - "b": "second", - "b_downstream": "third"} - """ - # Do nothing if the configured kernel isn't available in the kernel (e.g. - # after switching from device with multiple kernels to device with only one - # kernel) - kernels = pmb.parse._apkbuild.kernels(args, device) - if not kernels or kernel not in kernels: - logging.verbose(f"parse_kernel_suffix: {kernel} not in {kernels}") - return info - - ret = copy.copy(info) - - suffix_kernel = kernel.replace("-", "_") - for key in pmb.config.deviceinfo_attributes: - key_kernel = f"{key}_{suffix_kernel}" - if key_kernel not in ret: - continue - - # Move ret[key_kernel] to ret[key] - logging.verbose(f"parse_kernel_suffix: {key_kernel} => {key}") - ret[key] = ret[key_kernel] - del ret[key_kernel] - - return ret - - -def deviceinfo(args, device=None, kernel=None): - """ - :param device: defaults to args.device - :param kernel: defaults to args.kernel - """ - if not device: - device = args.device - if not kernel: - kernel = args.kernel - - if not os.path.exists(args.aports): - logging.fatal(f"Aports directory is missing, expected: {args.aports}") - logging.fatal("Please provide a path to the aports directory using the" - " -p flag") - raise RuntimeError("Aports directory missing") - - path = pmb.helpers.devices.find_path(args, device, 'deviceinfo') - if not path: - raise RuntimeError( - "Device '" + device + "' not found. Run 'pmbootstrap init' to" - " start a new device port or to choose another device. It may have" - " been renamed, see ") - - ret = {} - with open(path) as handle: - for line in handle: - if not line.startswith("deviceinfo_"): - continue - if "=" not in line: - raise SyntaxError(f"{path}: No '=' found:\n\t{line}") - split = line.split("=", 1) - key = split[0][len("deviceinfo_"):] - value = split[1].replace("\"", "").replace("\n", "") - ret[key] = value - - # Assign empty string as default - for key in pmb.config.deviceinfo_attributes: - if key not in ret: - ret[key] = "" - - ret = _parse_kernel_suffix(args, ret, device, kernel) - sanity_check(ret, path) - return ret diff --git a/pmb/parse/kconfig.py b/pmb/parse/kconfig.py deleted file mode 100644 index 9562dd71..00000000 --- a/pmb/parse/kconfig.py +++ /dev/null @@ -1,335 +0,0 @@ -# Copyright 2023 Attila Szollosi -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import logging -import re -import os - -import pmb.build -import pmb.config -import pmb.parse -import pmb.helpers.pmaports - - -def get_all_component_names(): - """ - Get the component names from kconfig_options variables in - pmb/config/__init__.py. This does not include the base options. - - :returns: a list of component names, e.g. 
["waydroid", "iwd", "nftables"] - """ - prefix = "kconfig_options_" - ret = [] - - for key in pmb.config.__dict__.keys(): - if key.startswith(prefix): - ret += [key.split(prefix, 1)[1]] - - return ret - - -def is_set(config, option): - """ - Check, whether a boolean or tristate option is enabled - either as builtin or module. - - :param config: full kernel config as string - :param option: name of the option to check, e.g. EXT4_FS - :returns: True if the check passed, False otherwise - """ - return re.search("^CONFIG_" + option + "=[ym]$", config, re.M) is not None - - -def is_set_str(config, option, string): - """ - Check, whether a config option contains a string as value. - - :param config: full kernel config as string - :param option: name of the option to check, e.g. EXT4_FS - :param string: the expected string - :returns: True if the check passed, False otherwise - """ - match = re.search("^CONFIG_" + option + "=\"(.*)\"$", config, re.M) - if match: - return string == match.group(1) - else: - return False - - -def is_in_array(config, option, string): - """ - Check, whether a config option contains string as an array element - - :param config: full kernel config as string - :param option: name of the option to check, e.g. EXT4_FS - :param string: the string expected to be an element of the array - :returns: True if the check passed, False otherwise - """ - match = re.search("^CONFIG_" + option + "=\"(.*)\"$", config, re.M) - if match: - values = match.group(1).split(",") - return string in values - else: - return False - - -def check_option(component, details, config, config_path, option, - option_value): - """ - Check, whether one kernel config option has a given value. - - :param component: name of the component to test (postmarketOS, waydroid, …) - :param details: print all warnings if True, otherwise one per component - :param config: full kernel config as string - :param config_path: full path to kernel config file - :param option: name of the option to check, e.g. EXT4_FS - :param option_value: expected value, e.g. True, "str", ["str1", "str2"] - :returns: True if the check passed, False otherwise - """ - def warn_ret_false(should_str): - config_name = os.path.basename(config_path) - if details: - logging.warning(f"WARNING: {config_name}: CONFIG_{option} should" - f" {should_str} ({component}):" - f" https://wiki.postmarketos.org/wiki/kconfig#CONFIG_{option}") - else: - logging.warning(f"WARNING: {config_name} isn't configured properly" - f" ({component}), run 'pmbootstrap kconfig check'" - " for details!") - return False - - if isinstance(option_value, list): - for string in option_value: - if not is_in_array(config, option, string): - return warn_ret_false(f'contain "{string}"') - elif isinstance(option_value, str): - if not is_set_str(config, option, option_value): - return warn_ret_false(f'be set to "{option_value}"') - elif option_value in [True, False]: - if option_value != is_set(config, option): - return warn_ret_false("be set" if option_value else "*not* be set") - else: - raise RuntimeError("kconfig check code can only handle booleans," - f" strings and arrays. Given value {option_value}" - " is not supported. If you need this, please patch" - " pmbootstrap or open an issue.") - return True - - -def check_config_options_set(config, config_path, config_arch, options, - component, pkgver, details=False): - """ - Check, whether all the kernel config passes all rules of one component. - Print a warning if any is missing. 
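# A standalone sketch of the regex checks above, run against a short
# inline kernel config instead of a real config-* file:
import re

config = (
    'CONFIG_EXT4_FS=y\n'
    'CONFIG_BINFMT_MISC=m\n'
    'CONFIG_DEFAULT_HOSTNAME="postmarketos"\n'
    'CONFIG_LSM="lockdown,yama,integrity"\n'
)


def is_set(config, option):
    # Same check as above: builtin (=y) or module (=m)
    return re.search("^CONFIG_" + option + "=[ym]$", config, re.M) is not None


assert is_set(config, "EXT4_FS")          # builtin
assert is_set(config, "BINFMT_MISC")      # module
assert not is_set(config, "VT")           # not set at all

# is_set_str() and is_in_array() apply the same idea to quoted values:
match = re.search('^CONFIG_LSM="(.*)"$', config, re.M)
assert match and "yama" in match.group(1).split(",")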
- - :param config: full kernel config as string - :param config_path: full path to kernel config file - :param config_arch: architecture name (alpine format, e.g. aarch64, x86_64) - :param options: kconfig_options* var passed from pmb/config/__init__.py: - kconfig_options_example = { - ">=0.0.0": { # all versions - "all": { # all arches - "ANDROID_PARANOID_NETWORK": False, - }, - } - :param component: name of the component to test (postmarketOS, waydroid, …) - :param pkgver: kernel version - :param details: print all warnings if True, otherwise one per component - :returns: True if the check passed, False otherwise - """ - ret = True - for rules, archs_options in options.items(): - # Skip options irrelevant for the current kernel's version - # Example rules: ">=4.0 <5.0" - skip = False - for rule in rules.split(" "): - if not pmb.parse.version.check_string(pkgver, rule): - skip = True - break - if skip: - continue - - for archs, options in archs_options.items(): - if archs != "all": - # Split and check if the device's architecture architecture has - # special config options. If option does not contain the - # architecture of the device kernel, then just skip the option. - architectures = archs.split(" ") - if config_arch not in architectures: - continue - - for option, option_value in options.items(): - if not check_option(component, details, config, config_path, - option, option_value): - ret = False - # Stop after one non-detailed error - if not details: - return False - return ret - - -def check_config(config_path, config_arch, pkgver, components_list=[], - details=False, enforce_check=True): - """ - Check, whether one kernel config passes the rules of multiple components. - - :param config_path: full path to kernel config file - :param config_arch: architecture name (alpine format, e.g. aarch64, x86_64) - :param pkgver: kernel version - :param components_list: what to check for, e.g. 
["waydroid", "iwd"] - :param details: print all warnings if True, otherwise one per component - :param enforce_check: set to False to not fail kconfig check as long as - everything in kconfig_options is set correctly, even - if additional components are checked - :returns: True if the check passed, False otherwise - """ - logging.debug(f"Check kconfig: {config_path}") - with open(config_path) as handle: - config = handle.read() - - # Devices in all categories need basic options - # https://wiki.postmarketos.org/wiki/Device_categorization - components_list = ["postmarketOS"] + components_list - - # Devices in "community" or "main" need additional options - if "community" in components_list: - components_list += [ - "containers", - "filesystems", - "iwd", - "netboot", - "nftables", - "usb_gadgets", - "waydroid", - "wireguard", - "zram", - ] - - components = {} - for name in components_list: - if name == "postmarketOS": - pmb_config_var = "kconfig_options" - else: - pmb_config_var = f"kconfig_options_{name}" - - components[name] = getattr(pmb.config, pmb_config_var, None) - assert components[name], f"invalid kconfig component name: {name}" - - results = [] - for component, options in components.items(): - result = check_config_options_set(config, config_path, config_arch, - options, component, pkgver, details) - # We always enforce "postmarketOS" component and when explicitly - # requested - if enforce_check or component == "postmarketOS": - results += [result] - - return all(results) - - -def check(args, pkgname, components_list=[], details=False, must_exist=True): - """ - Check for necessary kernel config options in a package. - - :param pkgname: the package to check for, optionally without "linux-" - :param components_list: what to check for, e.g. ["waydroid", "iwd"] - :param details: print all warnings if True, otherwise one generic warning - :param must_exist: if False, just return if the package does not exist - :returns: True when the check was successful, False otherwise - None if the aport cannot be found (only if must_exist=False) - """ - # Don't modify the original component_list (arguments are passed as - # reference, a list is not immutable) - components_list = components_list.copy() - - # Pkgname: allow omitting "linux-" prefix - if pkgname.startswith("linux-"): - flavor = pkgname.split("linux-")[1] - else: - flavor = pkgname - - # Read all kernel configs in the aport - ret = True - aport = pmb.helpers.pmaports.find(args, "linux-" + flavor, must_exist=must_exist) - if aport is None: - return None - apkbuild = pmb.parse.apkbuild(f"{aport}/APKBUILD") - pkgver = apkbuild["pkgver"] - - # We only enforce optional checks for community & main devices - enforce_check = aport.split("/")[-2] in ["community", "main"] - - for name in get_all_component_names(): - if f"pmb:kconfigcheck-{name}" in apkbuild["options"] and \ - name not in components_list: - components_list += [name] - - for config_path in glob.glob(aport + "/config-*"): - # The architecture of the config is in the name, so it just needs to be - # extracted - config_name = os.path.basename(config_path) - config_name_split = config_name.split(".") - - if len(config_name_split) != 2: - raise RuntimeError(f"{config_name} is not a valid kernel config " - "name. Ensure that the _config property in your " - "kernel APKBUILD has a . before the " - "architecture name, e.g. 
.aarch64 or .armv7, " - "and that there is no excess punctuation " - "elsewhere in the name.") - - config_arch = config_name_split[1] - ret &= check_config(config_path, config_arch, pkgver, components_list, - details=details, enforce_check=enforce_check) - return ret - - -def extract_arch(config_path): - # Extract the architecture out of the config - with open(config_path) as f: - config = f.read() - if is_set(config, "ARM"): - return "armv7" - elif is_set(config, "ARM64"): - return "aarch64" - elif is_set(config, "RISCV"): - return "riscv64" - elif is_set(config, "X86_32"): - return "x86" - elif is_set(config, "X86_64"): - return "x86_64" - - # No match - logging.info("WARNING: failed to extract arch from kernel config") - return "unknown" - - -def extract_version(config_path): - # Try to extract the version string out of the comment header - with open(config_path) as f: - # Read the first 3 lines of the file and get the third line only - text = [next(f) for x in range(3)][2] - ver_match = re.match(r"# Linux/\S+ (\S+) Kernel Configuration", text) - if ver_match: - return ver_match.group(1).replace("-", "_") - - # No match - logging.info("WARNING: failed to extract version from kernel config") - return "unknown" - - -def check_file(config_path, components_list=[], details=False): - """ - Check for necessary kernel config options in a kconfig file. - - :param config_path: full path to kernel config file - :param components_list: what to check for, e.g. ["waydroid", "iwd"] - :param details: print all warnings if True, otherwise one generic warning - :returns: True when the check was successful, False otherwise - """ - arch = extract_arch(config_path) - version = extract_version(config_path) - logging.debug(f"Check kconfig: parsed arch={arch}, version={version} from " - f"file: {config_path}") - return check_config(config_path, arch, version, components_list, - details=details) diff --git a/pmb/parse/version.py b/pmb/parse/version.py deleted file mode 100644 index ae2e84c6..00000000 --- a/pmb/parse/version.py +++ /dev/null @@ -1,309 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import collections - -""" -In order to stay as compatible to Alpine's apk as possible, this code -is heavily based on: - -https://git.alpinelinux.org/cgit/apk-tools/tree/src/version.c -""" - - -def token_value(string): - """ - Return the associated value for a given token string (we parse - through the version string one token at a time). - - :param string: a token string - :returns: integer associated to the token (so we can compare them in - functions further below, a digit (1) looses against a - letter (2), because "letter" has a higher value). - - C equivalent: enum PARTS - """ - order = { - "invalid": -1, - "digit_or_zero": 0, - "digit": 1, - "letter": 2, - "suffix": 3, - "suffix_no": 4, - "revision_no": 5, - "end": 6 - } - return order[string] - - -def next_token(previous, rest): - """ - Parse the next token in the rest of the version string, we're - currently looking at. - - We do *not* get the value of the token, or advance the rest string - beyond the whole token that is what the get_token() function does - (see below). - - :param previous: the token before - :param rest: of the version string - :returns: (next, rest) next is the upcoming token, rest is the - input "rest" string with one leading '.', '_' or '-' - character removed (if there was any). 
- - C equivalent: next_token() - """ - next = "invalid" - char = rest[:1] - - # Tokes, which do not change rest - if not len(rest): - next = "end" - elif previous in ["digit", "digit_or_zero"] and char.islower(): - next = "letter" - elif previous == "letter" and char.isdigit(): - next = "digit" - elif previous == "suffix" and char.isdigit(): - next = "suffix_no" - - # Tokens, which remove the first character of rest - else: - if char == ".": - next = "digit_or_zero" - elif char == "_": - next = "suffix" - elif rest.startswith("-r"): - next = "revision_no" - rest = rest[1:] - elif char == "-": - next = "invalid" - rest = rest[1:] - - # Validate current token - # Check if the transition from previous to current is valid - if token_value(next) < token_value(previous): - if not ((next == "digit_or_zero" and previous == "digit") or - (next == "suffix" and previous == "suffix_no") or - (next == "digit" and previous == "letter")): - next = "invalid" - return (next, rest) - - -def parse_suffix(rest): - """ - Cut off the suffix of rest (which is now at the beginning of the - rest variable, but regarding the whole version string, it is a - suffix), and return a value integer (so it can be compared later, - "beta" > "alpha" etc). - - :param rest: what is left of the version string that we are - currently parsing, starts with a "suffix" value - (see below for valid suffixes). - :returns: (rest, value, invalid_suffix) - - rest: is the input "rest" string without the suffix - - value: is a signed integer (negative for pre-, - positive for post-suffixes). - - invalid_suffix: is true, when rest does not start - with anything from the suffixes variable. - - C equivalent: get_token(), case TOKEN_SUFFIX - """ - - suffixes = collections.OrderedDict([ - ("pre", ["alpha", "beta", "pre", "rc"]), - ("post", ["cvs", "svn", "git", "hg", "p"]), - ]) - - for name, suffixes in suffixes.items(): - for i, suffix in enumerate(suffixes): - if not rest.startswith(suffix): - continue - rest = rest[len(suffix):] - value = i - if name == "pre": - value = value - len(suffixes) - return (rest, value, False) - return (rest, 0, True) - - -def get_token(previous, rest): - """ - This function does three things: - * get the next token - * get the token value - * cut-off the whole token from rest - - :param previous: the token before - :param rest: of the version string - :returns: (next, value, rest) next is the new token string, - value is an integer for comparing, rest is the rest of the - input string. 
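To make the suffix ordering above concrete, here is roughly what parse_suffix() returns for a few inputs, assuming a checkout that still contains the module this patch removes (pre-release suffixes map to negative values so they sort before the plain release, post-suffixes to non-negative ones):

    from pmb.parse.version import parse_suffix   # module removed by this patch

    assert parse_suffix("rc1") == ("1", -1, False)        # "rc" is a pre-suffix
    assert parse_suffix("alpha2") == ("2", -4, False)     # "alpha" sorts lowest
    assert parse_suffix("p1") == ("1", 4, False)          # "p" is a post-suffix
    assert parse_suffix("banana") == ("banana", 0, True)  # unknown suffix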
- - C equivalent: get_token() - """ - # Set defaults - value = 0 - next = "invalid" - invalid_suffix = False - - # Bail out if at the end - if not len(rest): - return ("end", 0, rest) - - # Cut off leading zero digits - if previous == "digit_or_zero" and rest.startswith("0"): - while rest.startswith("0"): - rest = rest[1:] - value -= 1 - next = "digit" - - # Add up numeric values - elif previous in ["digit_or_zero", "digit", "suffix_no", - "revision_no"]: - for i in range(len(rest)): - while len(rest) and rest[0].isdigit(): - value *= 10 - value += int(rest[i]) - rest = rest[1:] - - # Append chars or parse suffix - elif previous == "letter": - value = rest[0] - rest = rest[1:] - elif previous == "suffix": - (rest, value, invalid_suffix) = parse_suffix(rest) - - # Invalid previous token - else: - value = -1 - - # Get the next token (for non-leading zeros) - if not len(rest): - next = "end" - elif next == "invalid" and not invalid_suffix: - (next, rest) = next_token(previous, rest) - - return (next, value, rest) - - -def validate(version): - """ - Check whether one version string is valid. - - :param version: full version string - :returns: True when the version string is valid - - C equivalent: apk_version_validate() - """ - current = "digit" - rest = version - while current != "end": - (current, value, rest) = get_token(current, rest) - if current == "invalid": - return False - return True - - -def compare(a_version, b_version, fuzzy=False): - """ - Compare two versions A and B to find out which one is higher, or if - both are equal. - - :param a_version: full version string A - :param b_version: full version string B - :param fuzzy: treat version strings, which end in different token - types as equal - - :returns: - (a < b): -1 - (a == b): 0 - (a > b): 1 - - C equivalent: apk_version_compare_blob_fuzzy() - """ - - # Defaults - a_token = "digit" - b_token = "digit" - a_value = 0 - b_value = 0 - a_rest = a_version - b_rest = b_version - - # Parse A and B one token at a time, until one string ends, or the - # current token has a different type/value - while (a_token == b_token and a_token not in ["end", "invalid"] and - a_value == b_value): - (a_token, a_value, a_rest) = get_token(a_token, a_rest) - (b_token, b_value, b_rest) = get_token(b_token, b_rest) - - # Compare the values inside the last tokens - if a_value < b_value: - return -1 - if a_value > b_value: - return 1 - - # Equal: When tokens are the same strings, or when the value - # is the same and fuzzy compare is enabled - if a_token == b_token or fuzzy: - return 0 - - # Leading version components and their values are equal, now the - # non-terminating version is greater unless it's a suffix - # indicating pre-release - if a_token == "suffix": - (a_token, a_value, a_rest) = get_token(a_token, a_rest) - if a_value < 0: - return -1 - if b_token == "suffix": - (b_token, b_value, b_rest) = get_token(b_token, b_rest) - if b_value < 0: - return 1 - - # Compare the token value (e.g. digit < letter) - if token_value(a_token) > token_value(b_token): - return -1 - if token_value(a_token) < token_value(b_token): - return 1 - - # The tokens are not the same, but previous checks revealed that it - # is equal anyway (e.g. "1.0" == "1"). - return 0 - - -""" -Convenience functions below are not modeled after apk's version.c. -""" - - -def check_string(a_version, rule): - """ - Compare a version against a check string. This is used in "pmbootstrap - kconfig check", to only require certain options if the pkgver is in a - specified range (#1795). 
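With get_token() in place, the overall comparison mirrors apk's ordering rules. A hedged sketch of the expected results, again assuming the removed pmb.parse.version module is importable; the version strings are made up but follow the rules spelled out in the comments above:

    from pmb.parse.version import compare, validate

    assert validate("1.0_rc1-r2") is True
    assert compare("1.0", "1.0") == 0
    assert compare("1.0", "1.0.1") == -1     # shorter, otherwise-equal version is older
    assert compare("1.2_rc1", "1.2") == -1   # pre-release suffix sorts before the release
    assert compare("1.2_p1", "1.2") == 1     # post-suffix sorts after the release
    assert compare("1.0-r2", "1.0-r1") == 1  # higher -rX revision wins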
- - :param a_version: "3.4.1" - :param rule: ">=1.0.0" - :returns: True if a_version matches rule, false otherwise. - """ - # Operators and the expected returns of compare(a,b) - operator_results = {">=": [1, 0], - "<": [-1]} - - # Find the operator - b_version = None - expected_results = None - for operator in operator_results: - if rule.startswith(operator): - b_version = rule[len(operator):] - expected_results = operator_results[operator] - break - - # No operator found - if not b_version: - raise RuntimeError("Could not find operator in '" + rule + "'. You" - " probably need to adjust check_string() in" - " pmb/parse/version.py.") - - # Compare - result = compare(a_version, b_version) - return result in expected_results diff --git a/pmb/qemu/__init__.py b/pmb/qemu/__init__.py deleted file mode 100644 index ff4980c0..00000000 --- a/pmb/qemu/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# Copyright 2023 Pablo Castellano -# SPDX-License-Identifier: GPL-3.0-or-later -from pmb.qemu.run import run diff --git a/pmb/qemu/run.py b/pmb/qemu/run.py deleted file mode 100644 index 6f8b999f..00000000 --- a/pmb/qemu/run.py +++ /dev/null @@ -1,382 +0,0 @@ -# Copyright 2023 Pablo Castellano, Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import re -import signal -import shlex -import shutil - -import pmb.build -import pmb.chroot -import pmb.chroot.apk -import pmb.chroot.other -import pmb.chroot.initfs -import pmb.config -import pmb.config.pmaports -import pmb.helpers.run -import pmb.parse.arch -import pmb.parse.cpuinfo - - -def system_image(args): - """ - Returns path to rootfs for specified device. In case that it doesn't - exist, raise and exception explaining how to generate it. - """ - path = f"{args.work}/chroot_native/home/pmos/rootfs/{args.device}.img" - if not os.path.exists(path): - logging.debug("Could not find rootfs: " + path) - raise RuntimeError("The rootfs has not been generated yet, please " - "run 'pmbootstrap install' first.") - return path - - -def create_second_storage(args): - """ - Generate a second storage image if it does not exist. - :returns: path to the image or None - """ - path = f"{args.work}/chroot_native/home/pmos/rootfs/{args.device}-2nd.img" - pmb.helpers.run.root(args, ["touch", path]) - pmb.helpers.run.root(args, ["chmod", "a+w", path]) - resize_image(args, args.second_storage, path) - return path - - -def which_qemu(arch): - """ - Finds the qemu executable or raises an exception otherwise - """ - executable = "qemu-system-" + arch - if shutil.which(executable): - return executable - else: - raise RuntimeError("Could not find the '" + executable + "' executable" - " in your PATH. Please install it in order to" - " run qemu.") - - -def create_gdk_loader_cache(args): - """ - Create a gdk loader cache that can be used for running GTK UIs outside of - the chroot. 
- """ - gdk_cache_dir = "/usr/lib/gdk-pixbuf-2.0/2.10.0/" - custom_cache_path = gdk_cache_dir + "loaders-pmos-chroot.cache" - rootfs_native = args.work + "/chroot_native" - if os.path.isfile(rootfs_native + custom_cache_path): - return rootfs_native + custom_cache_path - - cache_path = gdk_cache_dir + "loaders.cache" - if not os.path.isfile(rootfs_native + cache_path): - raise RuntimeError("gdk pixbuf cache file not found: " + cache_path) - - pmb.chroot.root(args, ["cp", cache_path, custom_cache_path]) - cmd = ["sed", "-i", "-e", - f"s@\"{gdk_cache_dir}@\"{rootfs_native}{gdk_cache_dir}@", - custom_cache_path] - pmb.chroot.root(args, cmd) - return rootfs_native + custom_cache_path - - -def command_qemu(args, arch, img_path, img_path_2nd=None): - """ - Generate the full qemu command with arguments to run postmarketOS - """ - cmdline = args.deviceinfo["kernel_cmdline"] - if args.cmdline: - cmdline = args.cmdline - - if "video=" not in cmdline: - cmdline += " video=" + args.qemu_video - - logging.debug("Kernel cmdline: " + cmdline) - - port_ssh = str(args.port) - - suffix = "rootfs_" + args.device - rootfs = args.work + "/chroot_" + suffix - flavor = pmb.chroot.other.kernel_flavor_installed(args, suffix) - flavor_suffix = f"-{flavor}" - # Backwards compatibility with old mkinitfs (pma#660) - pmaports_cfg = pmb.config.pmaports.read_config(args) - if pmaports_cfg.get("supported_mkinitfs_without_flavors", False): - flavor_suffix = "" - - # Alpine kernels always have the flavor appended to /boot/vmlinuz - kernel = f"{rootfs}/boot/vmlinuz{flavor_suffix}" - if not os.path.exists(kernel): - kernel = f"{kernel}-{flavor}" - if not os.path.exists(kernel): - raise RuntimeError("failed to find the proper vmlinuz path") - - ncpus = os.cpu_count() - - # QEMU mach-virt's max CPU count is 8, limit it so it will work correctly - # on systems with more than 8 CPUs - if arch != pmb.config.arch_native and ncpus > 8: - ncpus = 8 - - if args.host_qemu: - qemu_bin = which_qemu(arch) - env = {} - command = [qemu_bin] - else: - rootfs_native = args.work + "/chroot_native" - env = {"QEMU_MODULE_DIR": f"{rootfs_native}/usr/lib/qemu", - "GBM_DRIVERS_PATH": f"{rootfs_native}/usr/lib/xorg/modules/dri", - "LIBGL_DRIVERS_PATH": f"{rootfs_native}" - "/usr/lib/xorg/modules/dri"} - - if "gtk" in args.qemu_display: - gdk_cache = create_gdk_loader_cache(args) - env.update({"GTK_THEME": "Default", - "GDK_PIXBUF_MODULE_FILE": gdk_cache, - "XDG_DATA_DIRS": rootfs_native + "/usr/local/share:" + - rootfs_native + "/usr/share"}) - - command = [] - if pmb.config.arch_native in ["aarch64", "armv7"]: - # Workaround for QEMU failing on aarch64 asymmetric multiprocessor - # arch (big/little architecture - # https://en.wikipedia.org/wiki/ARM_big.LITTLE) see - # https://bugs.linaro.org/show_bug.cgi?id=1443 - ncpus_bl = pmb.parse.cpuinfo.arm_big_little_first_group_ncpus() - if ncpus_bl: - ncpus = ncpus_bl - logging.info("QEMU will run on big/little architecture on the" - f" first {ncpus} cores (from /proc/cpuinfo)") - command += [rootfs_native + "/lib/ld-musl-" + - pmb.config.arch_native + ".so.1"] - command += [rootfs_native + "/usr/bin/taskset"] - command += ["-c", "0-" + str(ncpus - 1)] - - command += [rootfs_native + "/lib/ld-musl-" + - pmb.config.arch_native + ".so.1"] - command += ["--library-path=" + rootfs_native + "/lib:" + - rootfs_native + "/usr/lib:" + - rootfs_native + "/usr/lib/pulseaudio"] - command += [rootfs_native + "/usr/bin/qemu-system-" + arch] - command += ["-L", rootfs_native + "/usr/share/qemu/"] - - command += 
["-nodefaults"] - # Only boot a kernel/initramfs directly when not doing EFI boot. This - # allows us to load/execute an EFI application on boot, and support - # a wide variety of boot loaders. - if not args.efi: - command += ["-kernel", kernel] - command += ["-initrd", rootfs + "/boot/initramfs" + flavor_suffix] - command += ["-append", shlex.quote(cmdline)] - - command += ["-smp", str(ncpus)] - - command += ["-m", str(args.memory)] - - command += ["-serial"] - if args.qemu_redir_stdio: - command += ["mon:stdio"] - else: - command += ["stdio"] - - command += ["-drive", "file=" + img_path + ",format=raw,if=virtio"] - if img_path_2nd: - command += ["-drive", "file=" + img_path_2nd + ",format=raw,if=virtio"] - - if args.qemu_tablet: - command += ["-device", "virtio-tablet-pci"] - else: - command += ["-device", "virtio-mouse-pci"] - command += ["-device", "virtio-keyboard-pci"] - command += ["-netdev", f"user,id=net,hostfwd=tcp:127.0.0.1:{port_ssh}-:22"] - command += ["-device", "virtio-net-pci,netdev=net"] - - if arch == "x86_64": - command += ["-device", "virtio-vga-gl"] - elif arch == "aarch64": - command += ["-M", "virt"] - command += ["-cpu", "cortex-a57"] - command += ["-device", "virtio-gpu-pci"] - elif arch == "riscv64": - command += ["-M", "virt"] - command += ["-device", "virtio-gpu-pci"] - else: - raise RuntimeError(f"Architecture {arch} not supported by this command" - " yet.") - - if args.efi: - command += ["-drive", - "if=pflash,format=raw,readonly=on,file=/usr/share/OVMF/OVMF.fd"] - - # Kernel Virtual Machine (KVM) support - native = pmb.config.arch_native == args.deviceinfo["arch"] - if args.qemu_kvm and native and os.path.exists("/dev/kvm"): - command += ["-enable-kvm"] - command += ["-cpu", "host"] - else: - logging.info("WARNING: QEMU is not using KVM and will run slower!") - - if args.qemu_cpu: - command += ["-cpu", args.qemu_cpu] - - display = args.qemu_display - if display != "none": - display += ",gl=" + ("on" if args.qemu_gl else "off") - - # Separate -show-cursor option is deprecated. If your host qemu fails here, - # it's old (#1995). - command += ["-display", f"{display},show-cursor=on"] - - # Audio support - if args.qemu_audio: - command += ["-audio", f"{args.qemu_audio},model=hda"] - - return (command, env) - - -def resize_image(args, img_size_new, img_path): - """ - Truncates an image to a specific size. The value must be larger than the - current image size, and it must be specified in MiB or GiB units (powers of - 1024). - - :param img_size_new: new image size in M or G - :param img_path: the path to the image - """ - # Current image size in bytes - img_size = os.path.getsize(img_path) - - # Make sure we have at least 1 integer followed by either M or G - pattern = re.compile("^[0-9]+[M|G]$") - if not pattern.match(img_size_new): - raise RuntimeError("IMAGE_SIZE must be in [M]iB or [G]iB, e.g. 2048M" - " or 2G") - - # Remove M or G and convert to bytes - img_size_new_bytes = int(img_size_new[:-1]) * 1024 * 1024 - - # Convert further for G - if (img_size_new[-1] == "G"): - img_size_new_bytes = img_size_new_bytes * 1024 - - if (img_size_new_bytes >= img_size): - logging.info(f"Resize image to {img_size_new}: {img_path}") - pmb.helpers.run.root(args, ["truncate", "-s", img_size_new, img_path]) - else: - # Convert to human-readable format - # NOTE: We convert to M here, and not G, so that we don't have to - # display a size like 1.25G, since decimal places are not allowed by - # truncate. 
- # We don't want users thinking they can use decimal numbers, and so in - # this example, they would need to use a size greater then 1280M - # instead. - img_size_str = str(round(img_size / 1024 / 1024)) + "M" - - raise RuntimeError(f"IMAGE_SIZE must be {img_size_str} or greater") - - -def sigterm_handler(number, frame): - raise RuntimeError("pmbootstrap was terminated by another process," - " and killed the QEMU VM it was running.") - - -def install_depends(args, arch): - """ - Install any necessary qemu dependencies in native chroot - """ - depends = [ - "mesa-dri-gallium", - "mesa-egl", - "mesa-gl", - "qemu", - "qemu-audio-alsa", - "qemu-audio-pa", - "qemu-audio-sdl", - "qemu-hw-display-virtio-gpu", - "qemu-hw-display-virtio-gpu-gl", - "qemu-hw-display-virtio-gpu-pci", - "qemu-hw-display-virtio-vga", - "qemu-hw-display-virtio-vga-gl", - "qemu-system-" + arch, - "qemu-ui-gtk", - "qemu-ui-opengl", - "qemu-ui-sdl", - ] - - # QEMU packaging isn't split up as much in 3.12 - channel_cfg = pmb.config.pmaports.read_config_channel(args) - if channel_cfg["branch_aports"] == "3.12-stable": - depends.remove("qemu-hw-display-virtio-gpu") - depends.remove("qemu-hw-display-virtio-gpu-pci") - depends.remove("qemu-hw-display-virtio-vga") - depends.remove("qemu-ui-opengl") - - if args.efi: - depends.append("ovmf") - - pmb.chroot.apk.install(args, depends) - - -def run(args): - """ - Run a postmarketOS image in qemu - """ - if not args.device.startswith("qemu-"): - raise RuntimeError("'pmbootstrap qemu' can be only used with one of " - "the QEMU device packages. Run 'pmbootstrap init' " - "and select the 'qemu' vendor.") - arch = pmb.parse.arch.alpine_to_qemu(args.deviceinfo["arch"]) - - img_path = system_image(args) - img_path_2nd = None - if args.second_storage: - img_path_2nd = create_second_storage(args) - - if not args.host_qemu: - install_depends(args, arch) - logging.info("Running postmarketOS in QEMU VM (" + arch + ")") - - qemu, env = command_qemu(args, arch, img_path, img_path_2nd) - - # Workaround: QEMU runs as local user and needs write permissions in the - # rootfs, which is owned by root - if not os.access(img_path, os.W_OK): - pmb.helpers.run.root(args, ["chmod", "666", img_path]) - - # Resize the rootfs (or show hint) - if args.image_size: - resize_image(args, args.image_size, img_path) - else: - logging.info("NOTE: Run 'pmbootstrap qemu --image-size 2G' to set" - " the rootfs size when you run out of space!") - - # SSH/serial/network hints - logging.info("Connect to the VM:") - logging.info("* (ssh) ssh -p {port} {user}@localhost".format(**vars(args))) - logging.info("* (serial) in this console (stdout/stdin)") - - if args.qemu_redir_stdio: - logging.info("NOTE: Ctrl+C is redirected to the VM! 
To disable this, " - "run: pmbootstrap config qemu_redir_stdio False") - logging.info("NOTE: To quit QEMU with this option you can use " - "Ctrl-A, X.") - - if args.ui == "none": - logging.warning("WARNING: With UI=none network doesn't work" - " automatically: https://postmarketos.org/qemu-network") - - # Run QEMU and kill it together with pmbootstrap - process = None - try: - signal.signal(signal.SIGTERM, sigterm_handler) - process = pmb.helpers.run.user(args, qemu, output="tui", env=env) - except KeyboardInterrupt: - # In addition to not showing a trace when pressing ^C, let user know - # they can override this behavior: - logging.info("Quitting because Ctrl+C detected.") - logging.info("To override this behavior and have pmbootstrap " - "send Ctrl+C to the VM, run:") - logging.info("$ pmbootstrap config qemu_redir_stdio True") - finally: - if process: - process.terminate() diff --git a/pmb/sideload/__init__.py b/pmb/sideload/__init__.py deleted file mode 100644 index 2d11c6df..00000000 --- a/pmb/sideload/__init__.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright 2023 Martijn Braam -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import os -import logging - -import pmb.helpers.run -import pmb.helpers.run_core -import pmb.parse.apkindex -import pmb.config.pmaports -import pmb.build - - -def scp_abuild_key(args, user, host, port): - """ Copy the building key of the local installation to the target device, - so it trusts the apks that were signed here. - :param user: target device ssh username - :param host: target device ssh hostname - :param port: target device ssh port """ - - keys = glob.glob(os.path.join(args.work, "config_abuild", "*.pub")) - key = keys[0] - key_name = os.path.basename(key) - - logging.info(f"Copying signing key ({key_name}) to {user}@{host}") - command = ['scp', '-P', port, key, f'{user}@{host}:/tmp'] - pmb.helpers.run.user(args, command, output="interactive") - - logging.info(f"Installing signing key at {user}@{host}") - keyname = os.path.join("/tmp", os.path.basename(key)) - remote_cmd = ['sudo', '-p', pmb.config.sideload_sudo_prompt, - '-S', 'mv', '-n', keyname, "/etc/apk/keys/"] - remote_cmd = pmb.helpers.run_core.flat_cmd(remote_cmd) - command = ['ssh', '-t', '-p', port, f'{user}@{host}', remote_cmd] - pmb.helpers.run.user(args, command, output="tui") - - -def ssh_install_apks(args, user, host, port, paths): - """ Copy binary packages via SCP and install them via SSH. - :param user: target device ssh username - :param host: target device ssh hostname - :param port: target device ssh port - :param paths: list of absolute paths to locally stored apks - :type paths: list """ - - remote_paths = [] - for path in paths: - remote_paths.append(os.path.join('/tmp', os.path.basename(path))) - - logging.info(f"Copying packages to {user}@{host}") - command = ['scp', '-P', port] + paths + [f'{user}@{host}:/tmp'] - pmb.helpers.run.user(args, command, output="interactive") - - logging.info(f"Installing packages at {user}@{host}") - add_cmd = ['sudo', '-p', pmb.config.sideload_sudo_prompt, - '-S', 'apk', '--wait', '30', 'add'] + remote_paths - add_cmd = pmb.helpers.run_core.flat_cmd(add_cmd) - clean_cmd = pmb.helpers.run_core.flat_cmd(['rm'] + remote_paths) - command = ['ssh', '-t', '-p', port, f'{user}@{host}', - f'{add_cmd}; rc=$?; {clean_cmd}; exit $rc'] - pmb.helpers.run.user(args, command, output="tui") - - -def sideload(args, user, host, port, arch, copy_key, pkgnames): - """ Build packages if necessary and install them via SSH. 
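The interesting detail in ssh_install_apks() above is how it preserves apk's exit code while still cleaning up the uploaded files. A hedged sketch of that shell pattern, using shlex.quote as a rough stand-in for pmb.helpers.run_core.flat_cmd() and invented host and package names:

    import shlex

    remote_paths = ["/tmp/hello-world-1.0-r0.apk"]   # invented example package
    add_cmd = " ".join(shlex.quote(p) for p in
                       ["sudo", "-S", "apk", "--wait", "30", "add"] + remote_paths)
    clean_cmd = " ".join(shlex.quote(p) for p in ["rm"] + remote_paths)

    # "; rc=$?; ...; exit $rc" makes the ssh call fail iff "apk add" failed,
    # even though the cleanup runs either way.
    command = ["ssh", "-t", "-p", "22", "user@device",
               f"{add_cmd}; rc=$?; {clean_cmd}; exit $rc"]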
- - :param user: target device ssh username - :param host: target device ssh hostname - :param port: target device ssh port - :param arch: target device architecture - :param copy_key: copy the abuild key too - :param pkgnames: list of pkgnames to be built """ - - paths = [] - channel = pmb.config.pmaports.read_config(args)["channel"] - - for pkgname in pkgnames: - data_repo = pmb.parse.apkindex.package(args, pkgname, arch, True) - apk_file = f"{pkgname}-{data_repo['version']}.apk" - host_path = os.path.join(args.work, "packages", channel, arch, - apk_file) - if not os.path.isfile(host_path): - pmb.build.package(args, pkgname, arch, force=True) - - if not os.path.isfile(host_path): - raise RuntimeError(f"The package '{pkgname}' could not be built") - - paths.append(host_path) - - if copy_key: - scp_abuild_key(args, user, host, port) - - ssh_install_apks(args, user, host, port, paths) diff --git a/pmbootstrap.py b/pmbootstrap.py deleted file mode 100755 index 70832ae4..00000000 --- a/pmbootstrap.py +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python3 -# -*- encoding: UTF-8 -*- -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -# PYTHON_ARGCOMPLETE_OK -import sys -import pmb - -# A convenience wrapper for running pmbootstrap from the git repository. This -# script is not part of the python packaging, so don't add more logic here! -if __name__ == "__main__": - sys.exit(pmb.main()) diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 4f99e226..00000000 --- a/pyproject.toml +++ /dev/null @@ -1,5 +0,0 @@ -[tool.ruff] -# E402: module import not on top of file, not possible for testcases -# E722: do not use bare except -ignore=["E402", "E722"] -line-length=100 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index aa76baec..00000000 --- a/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[bdist_wheel] -universal=0 diff --git a/setup.py b/setup.py deleted file mode 100755 index 74e94e86..00000000 --- a/setup.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python3 - -import re -import ast -import sys - -from setuptools import setup, find_packages -from setuptools.command.test import test as TestCommand - -from codecs import open -from os import path - - -class PyTest(TestCommand): - user_options = [('pytest-args=', 'a', 'Arguments to pass to pytest')] - - def initialize_options(self): - TestCommand.initialize_options(self) - self.pytest_args = '' - - def run_tests(self): - import shlex - import pytest - errno = pytest.main(shlex.split(self.pytest_args)) - sys.exit(errno) - - -here = path.abspath(path.dirname(__file__)) -_version_re = re.compile(r'__version__\s+=\s+(.*)') - -with open(path.join(here, 'pmb/__init__.py'), 'rb') as f: - version = str(ast.literal_eval(_version_re.search( - f.read().decode('utf-8')).group(1))) - -with open(path.join(here, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -setup( - name='pmbootstrap', - version=version, - description='A sophisticated chroot / build / flash tool to ' - 'develop and install postmarketOS', - long_description=long_description, - long_description_content_type='text/markdown', - author='postmarketOS Developers', - author_email='info@postmarketos.org', - url='https://www.postmarketos.org', - license='GPLv3', - python_requires='>=3.7', - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', - 'Programming Language :: Python :: 3', - 'Programming Language :: 
Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - ], - keywords='postmarketos pmbootstrap', - packages=find_packages(exclude=['aports', 'keys', 'test']), - tests_require=['pytest'], - cmdclass={'test': PyTest}, - extras_require={ - 'completion': ['argcomplete'], - }, - entry_points={ - 'console_scripts': [ - 'pmbootstrap=pmb:main', - ], - }, - include_package_data=True, -) diff --git a/test/pmb_test/__init__.py b/test/pmb_test/__init__.py deleted file mode 100644 index 2d920746..00000000 --- a/test/pmb_test/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import sys - -# Add topdir to import path -topdir = os.path.realpath(os.path.join(os.path.dirname(__file__) + "/../..")) -sys.path.insert(0, topdir) diff --git a/test/pmb_test/const.py b/test/pmb_test/const.py deleted file mode 100644 index 94d78171..00000000 --- a/test/pmb_test/const.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb.config - - -testdata = pmb.config.pmb_src + "/test/testdata" diff --git a/test/pmb_test/git.py b/test/pmb_test/git.py deleted file mode 100644 index 7f1fe8c5..00000000 --- a/test/pmb_test/git.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Common code for git tests """ -import os - -import pmb.helpers.git -import pmb.helpers.run -import shutil - - -def prepare_tmpdir(args, monkeypatch, tmpdir): - """ Prepare git repositories in tmpdir, and override related functions. - - Git repositories: - * local: like local clone of pmaports.git - * remote: emulate a remote repository that we can add to "local", so - we can pass the tracking-remote tests in pmb.helpers.git.pull - * remote2: unexpected remote that pmbootstrap can complain about - - Function overrides: - * pmb.helpers.git.get_path: always return path to "local" repo - * pmb.helpers.git.get_upstream_remote: always return "origin" - - :returns: path_local, run_git - * path_local: path to "local" repo - * run_git(git_args, repo="local"): convenience function """ - # Directory structure - tmpdir = str(tmpdir) - path_local = tmpdir + "/local" - path_remote = tmpdir + "/remote" - path_remote2 = tmpdir + "/remote2" - os.makedirs(path_local) - os.makedirs(path_remote) - os.makedirs(path_remote2) - - def run_git(git_args, repo="local"): - path = tmpdir + "/" + repo - pmb.helpers.run.user(args, ["git"] + git_args, path, "stdout", output_return=True) - - # Remote repos - run_git(["init", "-b", "master", "."], "remote") - run_git(["commit", "--allow-empty", "-m", "commit: remote"], "remote") - run_git(["init", "-b", "master", "."], "remote2") - run_git(["commit", "--allow-empty", "-m", "commit: remote2"], "remote2") - - # Local repo (with master -> origin2/master) - run_git(["init", "-b", "master", "."]) - run_git(["remote", "add", "-f", "origin", path_remote]) - run_git(["remote", "add", "-f", "origin2", path_remote2]) - run_git(["checkout", "-b", "master", "--track", "origin2/master"]) - - # Override get_path() - def get_path(args, name_repo): - return path_local - monkeypatch.setattr(pmb.helpers.git, "get_path", get_path) - - # Override get_upstream_remote() - def get_u_r(args, name_repo): - return "origin" - monkeypatch.setattr(pmb.helpers.git, "get_upstream_remote", get_u_r) - - return path_local, run_git - 
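One note on the setup.py hunk further up: the package version is not hardcoded there, it is scraped from pmb/__init__.py with a regex and ast.literal_eval. A minimal standalone illustration of that mechanism; the "2.1.0" string is an invented stand-in for the real file contents:

    import ast
    import re

    _version_re = re.compile(r'__version__\s+=\s+(.*)')
    sample = '__version__ = "2.1.0"\n'   # stand-in for the first lines of pmb/__init__.py
    version = str(ast.literal_eval(_version_re.search(sample).group(1)))
    assert version == "2.1.0"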
-def copy_dotgit(args, tmpdir): - shutil.copytree(args.aports + "/.git", tmpdir + "/.git", ignore_dangling_symlinks=True) diff --git a/test/pytest.ini b/test/pytest.ini deleted file mode 100644 index 1b96e6cb..00000000 --- a/test/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -addopts = --strict-markers -markers = - skip_ci diff --git a/test/test_apk.py b/test/test_apk.py deleted file mode 100644 index 6cab881c..00000000 --- a/test/test_apk.py +++ /dev/null @@ -1,160 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import fnmatch -import pytest -import sys - -import pmb_test # noqa -import pmb.build -import pmb.chroot.apk - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_install_build(monkeypatch, args): - func = pmb.chroot.apk.install_build - ret_apkindex_package = None - - def fake_build_package(args, package, arch): - return "build-pkg" - monkeypatch.setattr(pmb.build, "package", fake_build_package) - - def fake_apkindex_package(args, package, arch, must_exist): - return ret_apkindex_package - monkeypatch.setattr(pmb.parse.apkindex, "package", fake_apkindex_package) - - package = "hello-world" - arch = "x86_64" - - # invoked as pmb install, build_pkgs_on_install disabled - args.action = "install" - args.build_pkgs_on_install = False - with pytest.raises(RuntimeError) as e: - func(args, package, arch) - assert "no binary package found" in str(e.value) - - # invoked as pmb install, build_pkgs_on_install disabled, binary exists - args.action = "install" - args.build_pkgs_on_install = False - ret_apkindex_package = {"pkgname": "hello-world"} - assert func(args, package, arch) is None - - # invoked as pmb install, build_pkgs_on_install enabled - args.action = "install" - args.build_pkgs_on_install = True - assert func(args, package, arch) == "build-pkg" - - # invoked as not pmb install - args.action = "chroot" - args.build_pkgs_on_install = False - assert func(args, package, arch) == "build-pkg" - - -def test_packages_split_to_add_del(): - packages = ["hello", "!test", "hello2", "test2", "!test3"] - - to_add, to_del = pmb.chroot.apk.packages_split_to_add_del(packages) - assert to_add == ["hello", "hello2", "test2"] - assert to_del == ["test", "test3"] - - -def test_packages_get_locally_built_apks(monkeypatch, args): - args.assume_yes = True - - arch = pmb.config.arch_native - packages = ["hello-world", # will exist in repo and locally - "postmarketos-base", # will exist in repo only - "package-that-does-not-exist"] # will not exist at all - - pmb.chroot.zap(args, pkgs_local=True) - pmb.build.package(args, "hello-world", force=True) - - ret = pmb.chroot.apk.packages_get_locally_built_apks(args, packages, arch) - assert len(ret) == 1 - assert fnmatch.fnmatch(ret[0], "*/hello-world-*.apk") - - -def test_install_run_apk(monkeypatch, args): - global cmds_progress - global cmds - - func = pmb.chroot.apk.install_run_apk - suffix = "chroot_native" - - def fake_chroot_root(args, command, suffix): - global cmds - cmds += [command] - monkeypatch.setattr(pmb.chroot, "root", fake_chroot_root) - - def fake_apk_progress(args, command, chroot, suffix): - global cmds_progress - cmds_progress += [command] - monkeypatch.setattr(pmb.helpers.apk, "apk_with_progress", fake_apk_progress) - - def reset_cmds(): - global 
cmds_progress, cmds - cmds = [] - cmds_progress = [] - - # Simple add - reset_cmds() - to_add = ["postmarketos-base", "device-ppp"] - to_add_local = [] - to_del = [] - func(args, to_add, to_add_local, to_del, suffix) - assert cmds_progress == [["apk", "add", "postmarketos-base", "device-ppp", - "--no-interactive"]] - assert cmds == [] - - # Add and delete - reset_cmds() - to_add = ["postmarketos-base", "device-ppp"] - to_add_local = [] - to_del = ["osk-sdl"] - func(args, to_add, to_add_local, to_del, suffix) - assert cmds_progress == [["apk", "add", "postmarketos-base", "device-ppp", - "--no-interactive"]] - assert cmds == [["apk", "--no-progress", "del", "osk-sdl", - "--no-interactive"]] - - # Add with local package - reset_cmds() - to_add = ["postmarketos-base", "device-ppp"] - to_add_local = ["/tmp/device-ppp.apk"] - to_del = [] - func(args, to_add, to_add_local, to_del, suffix) - assert cmds_progress == [["apk", "add", "postmarketos-base", "device-ppp", - "--no-interactive"]] - assert cmds == [["apk", "--no-progress", "add", "-u", "--virtual", - ".pmbootstrap", "/tmp/device-ppp.apk", "--no-interactive"], - ["apk", "--no-progress", "del", ".pmbootstrap", - "--no-interactive"]] - - # Add with --no-network - reset_cmds() - args.offline = True - to_add = ["hello-world"] - to_add_local = [] - to_del = [] - func(args, to_add, to_add_local, to_del, suffix) - assert cmds_progress == [["apk", "--no-network", "add", "hello-world", - "--no-interactive"]] - assert cmds == [] - - # Package name starting with '-' - reset_cmds() - to_add = ["hello-world", "--allow-untrusted"] - to_add_local = [] - to_del = [] - with pytest.raises(ValueError) as e: - func(args, to_add, to_add_local, to_del, suffix) - assert "Invalid package name" in str(e.value) diff --git a/test/test_apk_static.py b/test/test_apk_static.py deleted file mode 100644 index 84530f46..00000000 --- a/test/test_apk_static.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import copy -import sys -import tarfile -import glob -import pytest - -import pmb_test # noqa -import pmb.chroot.apk_static -import pmb.config -import pmb.parse.apkindex -import pmb.helpers.logging - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_read_signature_info(args): - # Tempfolder inside chroot for fake apk files - tmp_path = "/tmp/test_read_signature_info" - tmp_path_outside = args.work + "/chroot_native" + tmp_path - if os.path.exists(tmp_path_outside): - pmb.chroot.root(args, ["rm", "-r", tmp_path]) - pmb.chroot.user(args, ["mkdir", "-p", tmp_path]) - - # No signature found - pmb.chroot.user(args, ["tar", "-czf", tmp_path + "/no_sig.apk", - "/etc/issue"]) - with tarfile.open(tmp_path_outside + "/no_sig.apk", "r:gz") as tar: - with pytest.raises(RuntimeError) as e: - pmb.chroot.apk_static.read_signature_info(tar) - assert "Could not find signature" in str(e.value) - - # Signature file with invalid name - pmb.chroot.user(args, ["mkdir", "-p", tmp_path + "/sbin"]) - pmb.chroot.user(args, ["cp", "/etc/issue", tmp_path + - "/sbin/apk.static.SIGN.RSA.invalid.pub"]) - pmb.chroot.user(args, ["tar", "-czf", tmp_path + "/invalid_sig.apk", - "sbin/apk.static.SIGN.RSA.invalid.pub"], - working_dir=tmp_path) - with tarfile.open(tmp_path_outside + 
"/invalid_sig.apk", "r:gz") as tar: - with pytest.raises(RuntimeError) as e: - pmb.chroot.apk_static.read_signature_info(tar) - assert "Invalid signature key" in str(e.value) - - # Signature file with realistic name - path = glob.glob(pmb.config.apk_keys_path + "/*.pub")[0] - name = os.path.basename(path) - path_archive = "sbin/apk.static.SIGN.RSA." + name - pmb.chroot.user(args, ["mv", - f"{tmp_path}/sbin/apk.static.SIGN.RSA.invalid.pub", - f"{tmp_path}/{path_archive}"]) - pmb.chroot.user(args, ["tar", "-czf", tmp_path + "/realistic_name_sig.apk", - path_archive], working_dir=tmp_path) - with tarfile.open(f"{tmp_path_outside}/realistic_name_sig.apk", "r:gz")\ - as tar: - sigfilename, sigkey_path = pmb.chroot.apk_static.read_signature_info( - tar) - assert sigfilename == path_archive - assert sigkey_path == path - - # Clean up - pmb.chroot.user(args, ["rm", "-r", tmp_path]) - - -def test_successful_extraction(args, tmpdir): - if os.path.exists(args.work + "/apk.static"): - os.remove(args.work + "/apk.static") - - pmb.chroot.apk_static.init(args) - assert os.path.exists(args.work + "/apk.static") - os.remove(args.work + "/apk.static") - - -def test_signature_verification(args, tmpdir): - if os.path.exists(args.work + "/apk.static"): - os.remove(args.work + "/apk.static") - - version = pmb.parse.apkindex.package(args, "apk-tools-static")["version"] - apk_path = pmb.chroot.apk_static.download( - args, f"apk-tools-static-{version}.apk") - - # Extract to temporary folder - with tarfile.open(apk_path, "r:gz") as tar: - sigfilename, sigkey_path = pmb.chroot.apk_static.read_signature_info( - tar) - files = pmb.chroot.apk_static.extract_temp(tar, sigfilename) - - # Verify signature (successful) - pmb.chroot.apk_static.verify_signature(args, files, sigkey_path) - - # Append data to extracted apk.static - with open(files["apk"]["temp_path"], "ab") as handle: - handle.write("appended something".encode()) - - # Verify signature again (fail) (this deletes the tempfiles) - with pytest.raises(RuntimeError) as e: - pmb.chroot.apk_static.verify_signature(args, files, sigkey_path) - assert "Failed to validate signature" in str(e.value) - - # - # Test "apk.static --version" check - # - with pytest.raises(RuntimeError) as e: - pmb.chroot.apk_static.extract(args, "99.1.2-r1", apk_path) - assert "downgrade attack" in str(e.value) - - -def test_outdated_version(args, monkeypatch): - if os.path.exists(args.work + "/apk.static"): - os.remove(args.work + "/apk.static") - - # Change min version for all branches - min_copy = copy.copy(pmb.config.apk_tools_min_version) - for key, old_ver in min_copy.items(): - min_copy[key] = "99.1.2-r1" - monkeypatch.setattr(pmb.config, "apk_tools_min_version", min_copy) - - with pytest.raises(RuntimeError) as e: - pmb.chroot.apk_static.init(args) - assert "outdated version" in str(e.value) diff --git a/test/test_aportgen.py b/test/test_aportgen.py deleted file mode 100644 index 3ab4f757..00000000 --- a/test/test_aportgen.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import sys -import pytest -import shutil -import filecmp - -import pmb_test -import pmb_test.git -import pmb_test.const -import pmb.aportgen -import pmb.aportgen.core -import pmb.config -import pmb.helpers.logging - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - cfg = f"{pmb_test.const.testdata}/channels.cfg" - sys.argv = ["pmbootstrap.py", "--config-channels", cfg, "chroot"] - args = pmb.parse.arguments() - args.log = 
args.work + "/log_testsuite.txt" - args.fork_alpine = False - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_aportgen_compare_output(args, tmpdir, monkeypatch): - # Fake aports folder in tmpdir - tmpdir = str(tmpdir) - pmb_test.git.copy_dotgit(args, tmpdir) - args.aports = tmpdir - os.mkdir(tmpdir + "/cross") - testdata = pmb_test.const.testdata + "/aportgen" - - # Override get_upstream_aport() to point to testdata - def func(args, upstream_path, arch=None): - return testdata + "/aports/main/" + upstream_path - monkeypatch.setattr(pmb.aportgen.core, "get_upstream_aport", func) - - # Run aportgen and compare output - pkgnames = ["gcc-armhf"] - for pkgname in pkgnames: - pmb.aportgen.generate(args, pkgname) - path_new = args.aports + "/cross/" + pkgname + "/APKBUILD" - path_old = testdata + "/pmaports/cross/" + pkgname + "/APKBUILD" - assert os.path.exists(path_new) - assert filecmp.cmp(path_new, path_old, False) - - -def test_aportgen_fork_alpine_compare_output(args, tmpdir, monkeypatch): - # Fake aports folder in tmpdir - tmpdir = str(tmpdir) - pmb_test.git.copy_dotgit(args, tmpdir) - args.aports = tmpdir - os.mkdir(tmpdir + "/temp") - testdata = pmb_test.const.testdata + "/aportgen" - args.fork_alpine = True - - # Override get_upstream_aport() to point to testdata - def func(args, upstream_path, arch=None): - return testdata + "/aports/main/" + upstream_path - monkeypatch.setattr(pmb.aportgen.core, "get_upstream_aport", func) - - # Run aportgen and compare output - pkgname = "binutils" - pmb.aportgen.generate(args, pkgname) - path_new = args.aports + "/temp/" + pkgname + "/APKBUILD" - path_old = testdata + "/pmaports/temp/" + pkgname + "/APKBUILD" - assert os.path.exists(path_new) - assert filecmp.cmp(path_new, path_old, False) - - -def test_aportgen(args, tmpdir): - # Fake aports folder in tmpdir - testdata = pmb_test.const.testdata - tmpdir = str(tmpdir) - pmb_test.git.copy_dotgit(args, tmpdir) - args.aports = tmpdir - shutil.copy(f"{testdata}/pmaports.cfg", args.aports) - os.mkdir(tmpdir + "/cross") - - # Create aportgen folder -> code path where it still exists - pmb.helpers.run.user(args, ["mkdir", "-p", args.work + "/aportgen"]) - - # Generate all valid packages (gcc twice -> different code path) - pkgnames = ["musl-armv7", - "busybox-static-armv7", - "gcc-armv7", - "gcc-armv7"] - for pkgname in pkgnames: - pmb.aportgen.generate(args, pkgname) - - -def test_aportgen_invalid_generator(args): - with pytest.raises(ValueError) as e: - pmb.aportgen.generate(args, "pkgname-with-no-generator") - assert "No generator available" in str(e.value) - - -def test_aportgen_get_upstream_aport(args, monkeypatch): - # Fake pmb.parse.apkbuild() - def fake_apkbuild(*args, **kwargs): - return apkbuild - monkeypatch.setattr(pmb.parse, "apkbuild", fake_apkbuild) - - # Fake pmb.parse.apkindex.package() - def fake_package(*args, **kwargs): - return package - monkeypatch.setattr(pmb.parse.apkindex, "package", fake_package) - - # Equal version - func = pmb.aportgen.core.get_upstream_aport - upstream = "gcc" - upstream_full = args.work + "/cache_git/aports_upstream/main/" + upstream - apkbuild = {"pkgver": "2.0", "pkgrel": "0"} - package = {"version": "2.0-r0"} - assert func(args, upstream) == upstream_full - - # APKBUILD < binary - apkbuild = {"pkgver": "1.0", "pkgrel": "0"} - package = {"version": "2.0-r0"} - assert func(args, upstream) == upstream_full - - # APKBUILD > binary - apkbuild = {"pkgver": "3.0", "pkgrel": "0"} - package = 
{"version": "2.0-r0"} - assert func(args, upstream) == upstream_full diff --git a/test/test_aportgen_device_wizard.py b/test/test_aportgen_device_wizard.py deleted file mode 100644 index 76f5b1c5..00000000 --- a/test/test_aportgen_device_wizard.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import pytest -import sys -import shutil - -import pmb_test # noqa -import pmb_test.git -import pmb_test.const -import pmb.aportgen -import pmb.config -import pmb.helpers.logging -import pmb.parse - - -@pytest.fixture -def args(tmpdir, request): - cfg = f"{pmb_test.const.testdata}/channels.cfg" - sys.argv = ["pmbootstrap.py", "--config-channels", cfg, "build", "-i", - "device-testsuite-testdevice"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - - # Fake aports folder: - tmpdir = str(tmpdir) - pmb_test.git.copy_dotgit(args, tmpdir) - setattr(args, "_aports_real", args.aports) - args.aports = tmpdir - - # Copy the devicepkg-dev package (shared device-* APKBUILD code) - pmb.helpers.run.user(args, ["mkdir", "-p", tmpdir + "/main"]) - path_dev = args._aports_real + "/main/devicepkg-dev" - pmb.helpers.run.user(args, ["cp", "-r", path_dev, tmpdir + "/main"]) - - # Copy the linux-lg-mako aport (we currently copy patches from there) - pmb.helpers.run.user(args, ["mkdir", "-p", tmpdir + "/device/testing"]) - path_mako = args._aports_real + "/device/testing/linux-lg-mako" - pmb.helpers.run.user(args, ["cp", "-r", path_mako, - f"{tmpdir}/device/testing"]) - - # Copy pmaports.cfg - shutil.copy(f"{pmb_test.const.testdata}/pmaports.cfg", args.aports) - return args - - -def generate(args, monkeypatch, answers): - """ - Generate the device-new-device and linux-new-device aports (with a patched - pmb.helpers.cli()). - - :returns: (deviceinfo, apkbuild, apkbuild_linux) - the parsed dictionaries - of the created files, as returned by pmb.parse.apkbuild() and - pmb.parse.deviceinfo(). 
- """ - # Patched function - def fake_ask(question="Continue?", choices=["y", "n"], default="n", - lowercase_answer=True, validation_regex=None, complete=None): - for substr, answer in answers.items(): - if substr in question: - logging.info(question + ": " + answer) - # raise RuntimeError("test>" + answer) - return answer - raise RuntimeError("This testcase didn't expect the question '" + - question + "', please add it to the mapping.") - - # Generate the aports - monkeypatch.setattr(pmb.helpers.cli, "ask", fake_ask) - pmb.aportgen.generate(args, "device-testsuite-testdevice") - pmb.aportgen.generate(args, "linux-testsuite-testdevice") - monkeypatch.undo() - - apkbuild_path = (f"{args.aports}/device/testing/" - "device-testsuite-testdevice/APKBUILD") - apkbuild_path_linux = (args.aports + "/device/testing/" - "linux-testsuite-testdevice/APKBUILD") - - # The build fails if the email is not a valid email, so remove them just - # for tests - remove_contributor_maintainer_lines(args, apkbuild_path) - remove_contributor_maintainer_lines(args, apkbuild_path_linux) - - # Parse the deviceinfo and apkbuilds - pmb.helpers.other.cache["apkbuild"] = {} - apkbuild = pmb.parse.apkbuild(apkbuild_path) - apkbuild_linux = pmb.parse.apkbuild(apkbuild_path_linux, - check_pkgver=False) - deviceinfo = pmb.parse.deviceinfo(args, "testsuite-testdevice") - return (deviceinfo, apkbuild, apkbuild_linux) - - -def remove_contributor_maintainer_lines(args, path): - with open(path, "r+", encoding="utf-8") as handle: - lines_new = [] - for line in handle.readlines(): - # Skip maintainer/contributor - if line.startswith("# Maintainer") or line.startswith( - "# Contributor"): - continue - lines_new.append(line) - # Write back - handle.seek(0) - handle.write("".join(lines_new)) - handle.truncate() - - -def test_aportgen_device_wizard(args, monkeypatch): - """ - Generate a device-testsuite-testdevice and linux-testsuite-testdevice - package multiple times and check if the output is correct. Also build the - device package once. 
- """ - # Answers to interactive questions - answers = { - "Device architecture": "armv7", - "external storage": "y", - "hardware keyboard": "n", - "Flash method": "heimdall", - "Manufacturer": "Testsuite", - "Name": "Testsuite Testdevice", - "Year": "1337", - "Chassis": "handset", - "Type": "isorec", - } - - # First run - deviceinfo, apkbuild, apkbuild_linux = generate(args, monkeypatch, answers) - assert apkbuild["pkgname"] == "device-testsuite-testdevice" - assert apkbuild["pkgdesc"] == "Testsuite Testdevice" - assert apkbuild["depends"] == ["linux-testsuite-testdevice", - "postmarketos-base"] - - assert apkbuild_linux["pkgname"] == "linux-testsuite-testdevice" - assert apkbuild_linux["pkgdesc"] == "Testsuite Testdevice kernel fork" - assert apkbuild_linux["arch"] == ["armv7"] - assert apkbuild_linux["_flavor"] == "testsuite-testdevice" - - assert deviceinfo["name"] == "Testsuite Testdevice" - assert deviceinfo["manufacturer"] == answers["Manufacturer"] - assert deviceinfo["arch"] == "armv7" - assert deviceinfo["year"] == "1337" - assert deviceinfo["chassis"] == "handset" - assert deviceinfo["keyboard"] == "false" - assert deviceinfo["external_storage"] == "true" - assert deviceinfo["flash_method"] == "heimdall-isorec" - assert deviceinfo["generate_bootimg"] == "" - assert deviceinfo["generate_legacy_uboot_initfs"] == "" - - # Build the device package - pkgname = "device-testsuite-testdevice" - pmb.build.checksum.update(args, pkgname) - pmb.build.package(args, pkgname, "armv7", force=True) - - # Abort on overwrite confirmation - answers["overwrite"] = "n" - with pytest.raises(RuntimeError) as e: - deviceinfo, apkbuild, apkbuild_linux = generate(args, monkeypatch, - answers) - assert "Aborted." in str(e.value) - - # fastboot (mkbootimg) - answers["overwrite"] = "y" - answers["Flash method"] = "fastboot" - answers["Path"] = "" - deviceinfo, apkbuild, apkbuild_linux = generate(args, monkeypatch, answers) - assert apkbuild["depends"] == ["linux-testsuite-testdevice", - "mkbootimg", - "postmarketos-base"] - - assert deviceinfo["flash_method"] == answers["Flash method"] - assert deviceinfo["generate_bootimg"] == "true" - - # 0xffff (legacy uboot initfs) - answers["Flash method"] = "0xffff" - deviceinfo, apkbuild, apkbuild_linux = generate(args, monkeypatch, answers) - assert apkbuild["depends"] == ["linux-testsuite-testdevice", - "postmarketos-base", - "uboot-tools"] - - assert deviceinfo["generate_legacy_uboot_initfs"] == "true" diff --git a/test/test_arguments.py b/test/test_arguments.py deleted file mode 100644 index ed841118..00000000 --- a/test/test_arguments.py +++ /dev/null @@ -1,27 +0,0 @@ -# SPDX-License-Identifier: GPL-3.0-or-later - -import argparse - -import pytest - -from pmb.parse.arguments import toggle_other_boolean_flags - - -@pytest.fixture -def example_cli_with_flags(): - parser = argparse.ArgumentParser(prog="sample cli") - parser.add_argument("-f1", "--flag1", action="store_true") - parser.add_argument("-f2", "--flag2", action="store_true") - return parser - - -def test_toggle_other_boolean_flags(example_cli_with_flags): - other_flags = ["flag1", "flag2"] - example_cli_with_flags.add_argument( - "-f12", "--flag12", - action=toggle_other_boolean_flags(*other_flags)) - args = example_cli_with_flags.parse_args(['-f12']) - - expected_flags_true = other_flags + ["flag12"] - for flag in expected_flags_true: - assert getattr(args, flag) diff --git a/test/test_bootimg.py b/test/test_bootimg.py deleted file mode 100644 index 847f24bb..00000000 --- a/test/test_bootimg.py +++ 
/dev/null @@ -1,161 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import sys -import pytest - -import pmb_test -import pmb_test.const -import pmb.chroot.apk_static -import pmb.parse.apkindex -import pmb.helpers.logging -import pmb.parse.bootimg - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_bootimg_invalid_path(args): - with pytest.raises(RuntimeError) as e: - pmb.parse.bootimg(args, "/invalid-path") - assert "Could not find file" in str(e.value) - - -def test_bootimg_kernel(args): - path = pmb_test.const.testdata + "/bootimg/kernel-boot.img" - with pytest.raises(RuntimeError) as e: - pmb.parse.bootimg(args, path) - assert "heimdall-isorec" in str(e.value) - - -def test_bootimg_invalid_file(args): - with pytest.raises(RuntimeError) as e: - pmb.parse.bootimg(args, __file__) - assert "File is not an Android boot.img" in str(e.value) - - -def test_bootimg_normal(args): - path = pmb_test.const.testdata + "/bootimg/normal-boot.img" - output = {"header_version": "0", - "base": "0x80000000", - "kernel_offset": "0x00008000", - "ramdisk_offset": "0x04000000", - "second_offset": "0x00f00000", - "tags_offset": "0x0e000000", - "pagesize": "2048", - "cmdline": "bootopt=64S3,32S1,32S1", - "qcdt": "false", - "dtb_second": "false"} - assert pmb.parse.bootimg(args, path) == output - - -def test_bootimg_qcdt(args): - path = pmb_test.const.testdata + "/bootimg/qcdt-boot.img" - output = {"base": "0x80000000", - "kernel_offset": "0x00008000", - "ramdisk_offset": "0x04000000", - "second_offset": "0x00f00000", - "tags_offset": "0x0e000000", - "pagesize": "2048", - "cmdline": "bootopt=64S3,32S1,32S1", - "qcdt": "true", - "dtb_second": "false"} - assert pmb.parse.bootimg(args, path) == output - - -def test_bootimg_mtk(args): - path = pmb_test.const.testdata + "/bootimg/mtk-boot.img" - output = {"header_version": "0", - "base": "0x10000000", - "kernel_offset": "0x00008000", - "ramdisk_offset": "0x01000000", - "second_offset": "0x00f00000", - "tags_offset": "0x00000100", - "pagesize": "2048", - "mtk_label_kernel": "KERNEL", - "mtk_label_ramdisk": "ROOTFS", - "cmdline": "", - "qcdt": "false", - "dtb_second": "false"} - assert pmb.parse.bootimg(args, path) == output - - -def test_bootimg_mtk_recovery(args): - path = pmb_test.const.testdata + "/bootimg/mtk-boot-recovery.img" - output = {"header_version": "0", - "base": "0x80000000", - "kernel_offset": "0x00008000", - "ramdisk_offset": "0x04000000", - "second_offset": "0x00f00000", - "tags_offset": "0x00000100", - "pagesize": "2048", - "mtk_label_kernel": "KERNEL", - "mtk_label_ramdisk": "ROOTFS", - "cmdline": "", - "qcdt": "false", - "dtb_second": "false"} - assert pmb.parse.bootimg(args, path) == output - - -def test_bootimg_mtk_kernelonly(args): - path = pmb_test.const.testdata + "/bootimg/mtk-boot-kernel-only.img" - output = {"header_version": "0", - "base": "0x10000000", - "kernel_offset": "0x00008000", - "ramdisk_offset": "0x01000000", - "second_offset": "0xf0000000", - "tags_offset": "0x00000100", - "pagesize": "2048", - "mtk_label_kernel": "KERNEL", - "cmdline": "", - "qcdt": "false", - "dtb_second": "false"} - assert pmb.parse.bootimg(args, path) == output - - -def test_bootimg_dtb_second(args): - path = pmb_test.const.testdata + "/bootimg/dtb-second-boot.img" - output = 
{"header_version": "0", - "base": "0x00000000", - "kernel_offset": "0x00008000", - "ramdisk_offset": "0x02000000", - "second_offset": "0x00f00000", - "tags_offset": "0x00000100", - "pagesize": "2048", - "cmdline": "bootopt=64S3,32S1,32S1", - "qcdt": "false", - "dtb_second": "true"} - assert pmb.parse.bootimg(args, path) == output - - -def test_bootimg_v2(args): - path = pmb_test.const.testdata + "/bootimg/boot-header-v2.img" - output = {"header_version": "2", - "base": "0x40078000", - "kernel_offset": "0x00008000", - "ramdisk_offset": "0x07c08000", - "second_offset": "0x00e10000", - "tags_offset": "0x0bc08000", - "pagesize": "2048", - "dtb_offset": "0x0bc08000", - "cmdline": "bootopt=64S3,32N2,64N2 systempart=/dev/mapper/system", - "qcdt": "false", - "dtb_second": "false"} - assert pmb.parse.bootimg(args, path) == output - - -def test_bootimg_v3(args): - path = pmb_test.const.testdata + "/bootimg/boot-header-v3.img" - output = {"header_version": "3", - "pagesize": "4096", - "cmdline": "twrpfastboot=1", - "qcdt": "false", - "dtb_second": "false"} - assert pmb.parse.bootimg(args, path) == output diff --git a/test/test_build_is_necessary.py b/test/test_build_is_necessary.py deleted file mode 100644 index 89512b95..00000000 --- a/test/test_build_is_necessary.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import sys -import pytest - -import pmb_test # noqa -import pmb.helpers.logging -import pmb.helpers.pmaports - - -@pytest.fixture -def args(request, tmpdir): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - - # Create an empty APKINDEX.tar.gz file, so we can use its path and - # timestamp to put test information in the cache. - apkindex_path = str(tmpdir) + "/APKINDEX.tar.gz" - open(apkindex_path, "a").close() - lastmod = os.path.getmtime(apkindex_path) - pmb.helpers.other.cache["apkindex"][apkindex_path] = {"lastmod": lastmod, - "multiple": {}} - return args - - -def cache_apkindex(version): - """ - Modify the cache of the parsed binary package repository's APKINDEX - for the "hello-world" package. - :param version: full version string, includes pkgver and pkgrl (e.g. 
1-r2) - """ - apkindex_path = list(pmb.helpers.other.cache["apkindex"].keys())[0] - - providers = pmb.helpers.other.cache[ - "apkindex"][apkindex_path]["multiple"]["hello-world"] - providers["hello-world"]["version"] = version - - -def test_build_is_necessary(args): - # Prepare APKBUILD and APKINDEX data - aport = pmb.helpers.pmaports.find(args, "hello-world") - apkbuild = pmb.parse.apkbuild(f"{aport}/APKBUILD") - apkbuild["pkgver"] = "1" - apkbuild["pkgrel"] = "2" - indexes = list(pmb.helpers.other.cache["apkindex"].keys()) - apkindex_path = indexes[0] - cache = {"hello-world": {"hello-world": {"pkgname": "hello-world", - "version": "1-r2"}}} - pmb.helpers.other.cache["apkindex"][apkindex_path]["multiple"] = cache - - # Binary repo has a newer version - cache_apkindex("999-r1") - assert pmb.build.is_necessary(args, None, apkbuild, indexes) is False - - # Aports folder has a newer version - cache_apkindex("0-r0") - assert pmb.build.is_necessary(args, None, apkbuild, indexes) is True - - # Same version - cache_apkindex("1-r2") - assert pmb.build.is_necessary(args, None, apkbuild, indexes) is False - - -def test_build_is_necessary_no_binary_available(args): - """ - APKINDEX cache is set up to fake an empty APKINDEX, which means that the - hello-world package has not been built yet. - """ - indexes = list(pmb.helpers.other.cache["apkindex"].keys()) - aport = pmb.helpers.pmaports.find(args, "hello-world") - apkbuild = pmb.parse.apkbuild(f"{aport}/APKBUILD") - assert pmb.build.is_necessary(args, None, apkbuild, indexes) is True - - -def test_build_is_necessary_cant_build_pmaport_for_arch(args): - """ pmaport version is higher than Alpine's binary package, but pmaport - can't be built for given arch. (#1897) """ - - apkbuild = {"pkgname": "alpine-base", - "arch": "armhf", # can't build for x86_64! 
- "pkgver": "9999", - "pkgrel": "0"} - assert pmb.build.is_necessary(args, "x86_64", apkbuild) is False - assert pmb.build.is_necessary(args, "armhf", apkbuild) is True diff --git a/test/test_build_package.py b/test/test_build_package.py deleted file mode 100644 index f9cc73c5..00000000 --- a/test/test_build_package.py +++ /dev/null @@ -1,483 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Tests all functions from pmb.build._package """ -import datetime -import glob -import os -import pytest -import shutil -import sys - -import pmb_test # noqa -import pmb_test.git -import pmb.build -import pmb.build._package -import pmb.config -import pmb.config.init -import pmb.helpers.logging - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def return_none(*args, **kwargs): - return None - - -def return_string(*args, **kwargs): - return "some/random/path.apk" - - -def return_true(*args, **kwargs): - return True - - -def return_false(*args, **kwargs): - return False - - -def return_fake_build_depends(*args, **kwargs): - """ - Fake return value for pmb.build._package.build_depends: - depends: ["alpine-base"], depends_built: [] - """ - return (["alpine-base"], []) - - -def args_patched(monkeypatch, argv): - monkeypatch.setattr(sys, "argv", argv) - return pmb.parse.arguments() - - -def test_skip_already_built(args): - func = pmb.build._package.skip_already_built - assert pmb.helpers.other.cache["built"] == {} - assert func("test-package", "armhf") is False - assert pmb.helpers.other.cache["built"] == {"armhf": ["test-package"]} - assert func("test-package", "armhf") is True - - -def test_get_apkbuild(args): - func = pmb.build._package.get_apkbuild - - # Valid aport - pkgname = "postmarketos-base" - assert func(args, pkgname, "x86_64")["pkgname"] == pkgname - - # Valid binary package - assert func(args, "alpine-base", "x86_64") is None - - # Invalid package - with pytest.raises(RuntimeError) as e: - func(args, "invalid-package-name", "x86_64") - assert "Could not find" in str(e.value) - - -def test_check_build_for_arch(monkeypatch, args): - # Fake APKBUILD data - apkbuild = {"pkgname": "testpkgname"} - - def fake_helpers_pmaports_get(args, pkgname): - return apkbuild - monkeypatch.setattr(pmb.helpers.pmaports, "get", fake_helpers_pmaports_get) - - # pmaport with arch exists - func = pmb.build._package.check_build_for_arch - apkbuild["arch"] = ["armhf"] - assert func(args, "testpkgname", "armhf") is True - apkbuild["arch"] = ["noarch"] - assert func(args, "testpkgname", "armhf") is True - apkbuild["arch"] = ["all"] - assert func(args, "testpkgname", "armhf") is True - - # No binary package exists and can't build it - apkbuild["arch"] = ["x86_64"] - with pytest.raises(RuntimeError) as e: - func(args, "testpkgname", "armhf") - assert "Can't build" in str(e.value) - - # pmaport can't be built for x86_64, but binary package exists in Alpine - apkbuild = {"pkgname": "mesa", - "arch": "armhf", - "pkgver": "9999", - "pkgrel": "0"} - assert func(args, "mesa", "x86_64") is False - - -def test_get_depends(monkeypatch): - func = pmb.build._package.get_depends - apkbuild = {"pkgname": "test", "depends": ["a"], "makedepends": ["c", "b"], - "checkdepends": "e", "subpackages": {"d": None}, "options": []} - - # Depends + makedepends - args = 
args_patched(monkeypatch, ["pmbootstrap", "build", "test"]) - assert func(args, apkbuild) == ["a", "b", "c", "e"] - args = args_patched(monkeypatch, ["pmbootstrap", "install"]) - assert func(args, apkbuild) == ["a", "b", "c", "e"] - - # Ignore depends (-i) - args = args_patched(monkeypatch, ["pmbootstrap", "build", "-i", "test"]) - assert func(args, apkbuild) == ["b", "c", "e"] - - # Package depends on its own subpackage - apkbuild["makedepends"] = ["d"] - args = args_patched(monkeypatch, ["pmbootstrap", "build", "test"]) - assert func(args, apkbuild) == ["a", "e"] - - # Package depends on itself - apkbuild["makedepends"] = ["c", "b", "test"] - args = args_patched(monkeypatch, ["pmbootstrap", "build", "test"]) - assert func(args, apkbuild) == ["a", "b", "c", "e"] - - -def test_build_depends(args, monkeypatch): - # Shortcut and fake apkbuild - func = pmb.build._package.build_depends - apkbuild = {"pkgname": "test", "depends": ["a", "!c"], - "makedepends": ["b"], "checkdepends": [], - "subpackages": {"d": None}, "options": []} - - # No depends built (first makedepends + depends, then only makedepends) - monkeypatch.setattr(pmb.build._package, "package", return_none) - assert func(args, apkbuild, "armhf", True) == (["!c", "a", "b"], []) - - # All depends built (makedepends only) - monkeypatch.setattr(pmb.build._package, "package", return_string) - assert func(args, apkbuild, "armhf", False) == (["!c", "a", "b"], - ["a", "b"]) - - -def test_build_depends_no_binary_error(args, monkeypatch): - # Shortcut and fake apkbuild - func = pmb.build._package.build_depends - apkbuild = {"pkgname": "test", "depends": ["some-invalid-package-here"], - "makedepends": [], "checkdepends": [], "subpackages": {}, - "options": []} - - # pmbootstrap build --no-depends - args.no_depends = True - - # Missing binary package error - with pytest.raises(RuntimeError) as e: - func(args, apkbuild, "armhf", True) - assert str(e.value).startswith("Missing binary package for dependency") - - # All depends exist - apkbuild["depends"] = ["alpine-base"] - assert func(args, apkbuild, "armhf", True) == (["alpine-base"], []) - - -def test_build_depends_binary_outdated(args, monkeypatch): - """ pmbootstrap runs with --no-depends and dependency binary package is - outdated (#1895) """ - # Override pmb.parse.apkindex.package(): pretend hello-world-wrapper is - # missing and hello-world is outdated - func_orig = pmb.parse.apkindex.package - - def func_patch(args, package, *args2, **kwargs): - print(f"func_patch: called for package: {package}") - if package == "hello-world-wrapper": - print("pretending that it does not exist") - return None - if package == "hello-world": - print("pretending that it is outdated") - ret = func_orig(args, package, *args2, **kwargs) - ret["version"] = "0-r0" - return ret - return func_orig(args, package, *args2, **kwargs) - monkeypatch.setattr(pmb.parse.apkindex, "package", func_patch) - - # Build hello-world-wrapper with --no-depends and expect failure - args.no_depends = True - pkgname = "hello-world-wrapper" - arch = "x86_64" - force = False - strict = True - with pytest.raises(RuntimeError) as e: - pmb.build.package(args, pkgname, arch, force, strict) - assert "'hello-world' of 'hello-world-wrapper' is outdated" in str(e.value) - - -def test_is_necessary_warn_depends(args, monkeypatch): - # Shortcut and fake apkbuild - func = pmb.build._package.is_necessary_warn_depends - apkbuild = {"pkgname": "test"} - - # Necessary - monkeypatch.setattr(pmb.build, "is_necessary", return_true) - assert func(args, 
apkbuild, "armhf", False, []) is True - - # Necessary (strict=True overrides is_necessary()) - monkeypatch.setattr(pmb.build, "is_necessary", return_false) - assert func(args, apkbuild, "armhf", True, []) is True - - # Not necessary (with depends: different code path that prints a warning) - assert func(args, apkbuild, "armhf", False, []) is False - assert func(args, apkbuild, "armhf", False, ["first", "second"]) is False - - -def test_init_buildenv(args, monkeypatch): - # First init native chroot buildenv properly without patched functions - pmb.build.init(args) - - # Disable effects of functions we don't want to test here - monkeypatch.setattr(pmb.build._package, "build_depends", - return_fake_build_depends) - monkeypatch.setattr(pmb.build._package, "is_necessary_warn_depends", - return_true) - monkeypatch.setattr(pmb.chroot.apk, "install", return_none) - - # Shortcut and fake apkbuild - func = pmb.build._package.init_buildenv - apkbuild = {"pkgname": "test", "depends": ["a"], "makedepends": ["b"], - "options": []} - - # Build is necessary (various code paths) - assert func(args, apkbuild, "armhf", strict=True) is True - assert func(args, apkbuild, "armhf", cross="native") is True - - # Build is not necessary (only builds dependencies) - monkeypatch.setattr(pmb.build._package, "is_necessary_warn_depends", - return_false) - assert func(args, apkbuild, "armhf") is False - - -def test_get_pkgver(monkeypatch): - # With original source - func = pmb.build._package.get_pkgver - assert func("1.0", True) == "1.0" - - # Without original source - now = datetime.date(2018, 1, 1) - assert func("1.0", False, now) == "1.0_p20180101000000" - assert func("1.0_git20170101", False, now) == "1.0_p20180101000000" - - -def test_run_abuild(args, monkeypatch): - # Disable effects of functions we don't want to test here - monkeypatch.setattr(pmb.build, "copy_to_buildpath", return_none) - monkeypatch.setattr(pmb.chroot, "user", return_none) - - # Shortcut and fake apkbuild - func = pmb.build._package.run_abuild - apkbuild = {"pkgname": "test", "pkgver": "1", "pkgrel": "2", "options": []} - - # Normal run - output = "armhf/test-1-r2.apk" - env = {"CARCH": "armhf", - "GOCACHE": "/home/pmos/.cache/go-build", - "RUSTC_WRAPPER": "/usr/bin/sccache", - "SUDO_APK": "abuild-apk --no-progress"} - cmd = ["abuild", "-D", "postmarketOS", "-d"] - assert func(args, apkbuild, "armhf") == (output, cmd, env) - - # Force and strict - cmd = ["abuild", "-D", "postmarketOS", "-r", "-f"] - assert func(args, apkbuild, "armhf", True, True) == (output, cmd, env) - - # cross=native - env = {"CARCH": "armhf", - "GOCACHE": "/home/pmos/.cache/go-build", - "RUSTC_WRAPPER": "/usr/bin/sccache", - "SUDO_APK": "abuild-apk --no-progress", - "CROSS_COMPILE": "armv6-alpine-linux-musleabihf-", - "CC": "armv6-alpine-linux-musleabihf-gcc"} - cmd = ["abuild", "-D", "postmarketOS", "-d"] - assert func(args, apkbuild, "armhf", cross="native") == (output, cmd, env) - - -def test_finish(args, monkeypatch): - # Real output path - output = pmb.build.package(args, "hello-world", force=True) - - # Disable effects of functions we don't want to test below - monkeypatch.setattr(pmb.chroot, "user", return_none) - - # Shortcut and fake apkbuild - func = pmb.build._package.finish - apkbuild = {"options": []} - - # Non-existing output path - with pytest.raises(RuntimeError) as e: - func(args, apkbuild, "armhf", "/invalid/path") - assert "Package not found" in str(e.value) - - # Existing output path - func(args, apkbuild, pmb.config.arch_native, output) - - -def 
test_package(args): - # First build - assert pmb.build.package(args, "hello-world", force=True) - - # Package exists - pmb.helpers.other.cache["built"] = {} - assert pmb.build.package(args, "hello-world") is None - - # Force building again - pmb.helpers.other.cache["built"] = {} - assert pmb.build.package(args, "hello-world", force=True) - - # Build for another architecture - assert pmb.build.package(args, "hello-world", "armhf", force=True) - - # Upstream package, for which we don't have an aport - assert pmb.build.package(args, "alpine-base") is None - - -def test_build_depends_high_level(args, monkeypatch): - """ - "hello-world-wrapper" depends on "hello-world". We build both, then delete - "hello-world" and check that it gets rebuilt correctly again. - """ - # Patch pmb.build.is_necessary() to always build the hello-world package - def fake_build_is_necessary(args, arch, apkbuild, apkindex_path=None): - if apkbuild["pkgname"] == "hello-world": - return True - return pmb.build.other.is_necessary(args, arch, apkbuild, - apkindex_path) - monkeypatch.setattr(pmb.build, "is_necessary", - fake_build_is_necessary) - - # Build hello-world to get its full output path - channel = pmb.config.pmaports.read_config(args)["channel"] - output_hello = pmb.build.package(args, "hello-world") - output_hello_outside = f"{args.work}/packages/{channel}/{output_hello}" - assert os.path.exists(output_hello_outside) - - # Make sure the wrapper exists - pmb.build.package(args, "hello-world-wrapper") - - # Remove hello-world - pmb.helpers.run.root(args, ["rm", output_hello_outside]) - pmb.build.index_repo(args, pmb.config.arch_native) - pmb.helpers.other.cache["built"] = {} - - # Ask to build the wrapper. It should not build the wrapper (it exists, not - # using force), but build/update its missing dependency "hello-world" - # instead. - assert pmb.build.package(args, "hello-world-wrapper") is None - assert os.path.exists(output_hello_outside) - - -def test_build_local_source_high_level(args, tmpdir): - """ - Test building a package with overriding the source code: - pmbootstrap build --src=/some/path hello-world - - We use a copy of the hello-world APKBUILD here that doesn't have the - source files it needs to build included. And we use the original aport - folder as local source folder, so pmbootstrap should take the source files - from there and the build should succeed. 
- """ - # aports: Add deviceinfo (required by pmbootstrap to start) - tmpdir = str(tmpdir) - aports = tmpdir + "/aports" - aport = aports + "/device/testing/device-" + args.device - os.makedirs(aport) - path_original = pmb.helpers.pmaports.find(args, f"device-{args.device}") - shutil.copy(f"{path_original}/deviceinfo", aport) - - # aports: Add modified hello-world aport (source="", uses $builddir) - aport = aports + "/main/hello-world" - os.makedirs(aport) - shutil.copy(pmb.config.pmb_src + "/test/testdata/build_local_src/APKBUILD", - aport) - - # aports: Add pmaports.cfg, .git - shutil.copy(args.aports + "/pmaports.cfg", aports) - pmb_test.git.copy_dotgit(args, tmpdir) - - # src: Copy hello-world source files - src = tmpdir + "/src" - os.makedirs(src) - shutil.copy(args.aports + "/main/hello-world/Makefile", src) - shutil.copy(args.aports + "/main/hello-world/main.c", src) - - # src: Create unreadable file (rsync should skip it) - unreadable = src + "/_unreadable_file" - shutil.copy(args.aports + "/main/hello-world/main.c", unreadable) - pmb.helpers.run.root(args, ["chown", "root:root", unreadable]) - pmb.helpers.run.root(args, ["chmod", "500", unreadable]) - - # Test native arch and foreign arch chroot - channel = pmb.config.pmaports.read_config(args)["channel"] - for arch in [pmb.config.arch_native, "armhf"]: - # Delete all hello-world --src packages - pattern = f"{args.work}/packages/{channel}/{arch}/hello-world-*_p*.apk" - for path in glob.glob(pattern): - pmb.helpers.run.root(args, ["rm", path]) - assert len(glob.glob(pattern)) == 0 - - # Build hello-world --src package - pmb.helpers.run.user(args, [pmb.config.pmb_src + "/pmbootstrap.py", - "--aports", aports, "build", "--src", src, - "hello-world", "--arch", arch]) - - # Verify that the package has been built and delete it - paths = glob.glob(pattern) - assert len(paths) == 1 - pmb.helpers.run.root(args, ["rm", paths[0]]) - - # Clean up: update index, delete temp folder - pmb.build.index_repo(args, pmb.config.arch_native) - pmb.helpers.run.root(args, ["rm", "-r", tmpdir]) - - -def test_build_abuild_leftovers(args, tmpdir): - """ - Test building a package with having abuild leftovers, that will error if - copied: - pmbootstrap build hello-world - """ - # aports: Add deviceinfo (required by pmbootstrap to start) - tmpdir = str(tmpdir) - aports = f"{tmpdir}/aports" - aport = f"{aports}/device/testing/device-{args.device}" - os.makedirs(aport) - path_original = pmb.helpers.pmaports.find(args, f"device-{args.device}") - shutil.copy(f"{path_original}/deviceinfo", aport) - - # aports: Add modified hello-world aport (source="", uses $builddir) - test_aport = "main/hello-world" - aport = f"{aports}/{test_aport}" - shutil.copytree(f"{args.aports}/{test_aport}", aport) - - # aports: Add pmaports.cfg, .git - shutil.copy(f"{args.aports}/pmaports.cfg", aports) - pmb_test.git.copy_dotgit(args, aports) - - # aport: create abuild dir with broken symlink - src = f"{aport}/src" - os.makedirs(src) - os.symlink("/var/cache/distfiles/non-existent.tar.gz", - f"{src}/broken-tarball-symlink.tar.gz") - - # Delete all hello-world packages - channel = pmb.config.pmaports.read_config(args)["channel"] - pattern = f"{args.work}/packages/{channel}/*/hello-world-*_p*.apk" - for path in glob.glob(pattern): - pmb.helpers.run.root(args, ["rm", path]) - assert len(glob.glob(pattern)) == 0 - - # Build hello-world package - pmb.helpers.run.user(args, [f"{pmb.config.pmb_src}/pmbootstrap.py", - "--aports", aports, "build", "--src", src, - "hello-world", "--arch", 
pmb.config.arch_native]) - - # Verify that the package has been built and delete it - paths = glob.glob(pattern) - assert len(paths) == 1 - pmb.helpers.run.root(args, ["rm", paths[0]]) - - # Clean up: update index, delete temp folder - pmb.build.index_repo(args, pmb.config.arch_native) - pmb.helpers.run.root(args, ["rm", "-r", tmpdir]) diff --git a/test/test_chroot_interactive_shell.py b/test/test_chroot_interactive_shell.py deleted file mode 100644 index d5c50790..00000000 --- a/test/test_chroot_interactive_shell.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import subprocess -import os -import pmb_test # noqa -import pmb.config - - -def test_chroot_interactive_shell(): - """ - Open a shell with 'pmbootstrap chroot' and pass 'echo hello_world\n' as - stdin. - """ - os.chdir(pmb.config.pmb_src) - ret = subprocess.check_output(["./pmbootstrap.py", "-q", "chroot", "sh"], - timeout=300, input="echo hello_world\n", - universal_newlines=True, - stderr=subprocess.STDOUT) - assert ret == "hello_world\n" - - -def test_chroot_interactive_shell_user(): - """ - Open a shell with 'pmbootstrap chroot' as user, and test the resulting ID. - """ - os.chdir(pmb.config.pmb_src) - ret = subprocess.check_output(["./pmbootstrap.py", "-q", "chroot", - "--user", "sh"], timeout=300, - input="id -un", - universal_newlines=True, - stderr=subprocess.STDOUT) - assert ret == "pmos\n" - - -def test_chroot_arguments(): - """ - Open a shell with 'pmbootstrap chroot' for every architecture, pass - 'uname -m\n' as stdin and check the output - """ - os.chdir(pmb.config.pmb_src) - - for arch in ["armhf", "aarch64", "x86_64"]: - ret = subprocess.check_output(["./pmbootstrap.py", "-q", "chroot", - "-b", arch, "sh"], - timeout=300, - input="uname -m\n", - universal_newlines=True, - stderr=subprocess.STDOUT) - if arch == "armhf": - assert ret == "armv7l\n" - else: - assert ret == arch + "\n" diff --git a/test/test_chroot_mount.py b/test/test_chroot_mount.py deleted file mode 100644 index 2f4b66db..00000000 --- a/test/test_chroot_mount.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb/chroot/mount.py """ -import os -import pytest -import sys - -import pmb_test # noqa -import pmb.chroot - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_chroot_mount(args): - suffix = "native" - mnt_dir = f"{args.work}/chroot_native/mnt/pmbootstrap" - - # Run something in the chroot to have the dirs created - pmb.chroot.root(args, ["true"]) - assert os.path.exists(mnt_dir) - assert os.path.exists(f"{mnt_dir}/packages") - - # Umount everything, like in pmb.install.install_system_image - pmb.helpers.mount.umount_all(args, f"{args.work}/chroot_{suffix}") - - # Remove all /mnt/pmbootstrap dirs - pmb.chroot.remove_mnt_pmbootstrap(args, suffix) - assert not os.path.exists(mnt_dir) - - # Run again: it should not crash - pmb.chroot.remove_mnt_pmbootstrap(args, suffix) diff --git a/test/test_config_init.py b/test/test_config_init.py deleted file mode 100644 index fdbc5e3d..00000000 --- a/test/test_config_init.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pytest - -import pmb_test # noqa -import 
pmb.config.init - - -def test_require_programs(monkeypatch): - func = pmb.config.init.require_programs - - # Nothing missing - func() - - # Missing program - invalid = "invalid-program-name-here-asdf" - monkeypatch.setattr(pmb.config, "required_programs", [invalid]) - with pytest.raises(RuntimeError) as e: - func() - assert str(e.value).startswith("Can't find all programs") diff --git a/test/test_config_pmaports.py b/test/test_config_pmaports.py deleted file mode 100644 index 15d0b6cf..00000000 --- a/test/test_config_pmaports.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb/config/pmaports.py """ -import pytest -import sys - -import pmb_test -import pmb_test.const -import pmb_test.git -import pmb.config -import pmb.config.workdir -import pmb.config.pmaports - - -@pytest.fixture -def args(request): - import pmb.parse - cfg = f"{pmb_test.const.testdata}/channels.cfg" - sys.argv = ["pmbootstrap.py", "--config-channels", cfg, "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_switch_to_channel_branch(args, monkeypatch, tmpdir): - path, run_git = pmb_test.git.prepare_tmpdir(args, monkeypatch, tmpdir) - args.aports = path - - # Pretend to have channel=edge in pmaports.cfg - def read_config(args): - return {"channel": "edge"} - monkeypatch.setattr(pmb.config.pmaports, "read_config", read_config) - - # Success: Channel does not change - func = pmb.config.pmaports.switch_to_channel_branch - assert func(args, "edge") is False - - # Fail: git error (could be any error, but here: branch does not exist) - with pytest.raises(RuntimeError) as e: - func(args, "v20.05") - assert str(e.value).startswith("Failed to switch branch") - - # Success: switch channel and change branch - run_git(["checkout", "-b", "v20.05"]) - run_git(["checkout", "master"]) - assert func(args, "v20.05") is True - branch = pmb.helpers.git.rev_parse(args, path, extra_args=["--abbrev-ref"]) - assert branch == "v20.05" - - -def test_read_config_channel(args, monkeypatch): - channel = "edge" - - # Pretend to have a certain channel in pmaports.cfg - def read_config(args): - return {"channel": channel} - monkeypatch.setattr(pmb.config.pmaports, "read_config", read_config) - - # Channel found - func = pmb.config.pmaports.read_config_channel - exp = {"description": "Rolling release channel", - "branch_pmaports": "master", - "branch_aports": "master", - "mirrordir_alpine": "edge"} - assert func(args) == exp - - # Channel not found - channel = "non-existing" - with pytest.raises(RuntimeError) as e: - func(args) - assert "channel was not found in channels.cfg" in str(e.value) diff --git a/test/test_config_user.py b/test/test_config_user.py deleted file mode 100644 index af4c349c..00000000 --- a/test/test_config_user.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import sys -import pytest - -import pmb_test # noqa -import pmb.aportgen -import pmb.config -import pmb.helpers.frontend -import pmb.helpers.logging -import pmb.helpers.run -import pmb.helpers.run_core - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def 
change_config(monkeypatch, path_config, key, value): - args = args_patched(monkeypatch, ["pmbootstrap.py", "-c", path_config, - "config", key, value]) - pmb.helpers.frontend.config(args) - - -def args_patched(monkeypatch, argv): - monkeypatch.setattr(sys, "argv", argv) - return pmb.parse.arguments() - - -def test_config_user(args, tmpdir, monkeypatch): - # Temporary paths - tmpdir = str(tmpdir) - path_work = tmpdir + "/work" - path_config = tmpdir + "/pmbootstrap.cfg" - - # Generate default config (only uses tmpdir) - cmd = pmb.helpers.run_core.flat_cmd(["./pmbootstrap.py", - "-c", path_config, - "-w", path_work, - "--aports", args.aports, - "init"]) - pmb.helpers.run.user(args, ["sh", "-c", "yes '' | " + cmd], - pmb.config.pmb_src) - - # Load and verify default config - argv = ["pmbootstrap.py", "-c", path_config, "config"] - args_default = args_patched(monkeypatch, argv) - assert args_default.work == path_work - - # Modify jobs count - change_config(monkeypatch, path_config, "jobs", "9000") - assert args_patched(monkeypatch, argv).jobs == "9000" - - # Override jobs count via commandline (-j) - argv_jobs = ["pmbootstrap.py", "-c", path_config, "-j", "1000", "config"] - assert args_patched(monkeypatch, argv_jobs).jobs == "1000" - - # Override a config option with something that evaluates to false - argv_empty = ["pmbootstrap.py", "-c", path_config, "-w", "", - "--details-to-stdout", "config"] - assert args_patched(monkeypatch, argv_empty).work == "" diff --git a/test/test_config_workdir.py b/test/test_config_workdir.py deleted file mode 100644 index b01b8f9b..00000000 --- a/test/test_config_workdir.py +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb/config/workdir.py """ -import os -import pytest -import sys -import time - -import pmb_test # noqa -import pmb.config -import pmb.config.pmaports -import pmb.config.workdir - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_chroot_save_init(args, tmpdir, monkeypatch): - # Override time.time() - def fake_time(): - return 1234567890.1234 - monkeypatch.setattr(time, "time", fake_time) - - # Pretend channel=v20.05 in pmaports.cfg - def read_config(args): - return {"channel": "v20.05"} - monkeypatch.setattr(pmb.config.pmaports, "read_config", read_config) - - args.work = str(tmpdir) - func = pmb.config.workdir.chroot_save_init - func(args, "native") - - expected = ("[chroot-init-dates]\n" - "native = 1234567890\n\n" - "[chroot-channels]\n" - "native = v20.05\n\n") - with open(args.work + "/workdir.cfg", "r") as handle: - assert handle.read() == expected - - # Write again (different code path) - func(args, "buildroot_armhf") - expected = ("[chroot-init-dates]\n" - "native = 1234567890\n" - "buildroot_armhf = 1234567890\n\n" - "[chroot-channels]\n" - "native = v20.05\n" - "buildroot_armhf = v20.05\n\n") - with open(args.work + "/workdir.cfg", "r") as handle: - assert handle.read() == expected - - -def test_chroots_outdated(args, tmpdir, monkeypatch): - args.work = str(tmpdir) - - # Override time.time(): now is "100" - def fake_time(): - return 100.0 - monkeypatch.setattr(time, "time", fake_time) - - # workdir.cfg does not exist - func = pmb.config.workdir.chroots_outdated - assert func(args) is False - - # workdir.cfg is empty file - with 
open(args.work + "/workdir.cfg", "w") as handle: - handle.write("") - assert func(args) is False - - # Write fake workdir.cfg: native was created at "90" - with open(args.work + "/workdir.cfg", "w") as handle: - handle.write("[chroot-init-dates]\nnative = 90\n\n") - - # Outdated (date_outdated: 90) - monkeypatch.setattr(pmb.config, "chroot_outdated", 10) - assert func(args) is True - - # Not outdated (date_outdated: 89) - monkeypatch.setattr(pmb.config, "chroot_outdated", 11) - assert func(args) is False - - -def test_chroot_check_channel(args, tmpdir, monkeypatch): - func = pmb.config.workdir.chroot_check_channel - args.work = str(tmpdir) - channel = "edge" - - # Pretend to have a certain channel in pmaports.cfg - def read_config(args): - return {"channel": channel} - monkeypatch.setattr(pmb.config.pmaports, "read_config", read_config) - - # workdir.cfg does not exist - with pytest.raises(RuntimeError) as e: - func(args, "native") - assert "Could not figure out on which release channel" in str(e.value) - - # Write workdir.cfg - with open(f"{args.work}/workdir.cfg", "w") as handle: - handle.write("[chroot-channels]\nnative = v20.05\n\n") - - # workdir.cfg: no entry for buildroot_armhf chroot - with pytest.raises(RuntimeError) as e: - func(args, "buildroot_armhf") - assert "Could not figure out on which release channel" in str(e.value) - - # Chroot was created for wrong channel - with pytest.raises(RuntimeError) as e: - func(args, "native") - exp = "created for the 'v20.05' channel, but you are on the 'edge'" - assert exp in str(e.value) - - # Check runs through without raising an exception - channel = "v20.05" - func(args, "native") - - -def test_clean(args, tmpdir): - args.work = str(tmpdir) - - # 0. workdir.cfg does not exist - func = pmb.config.workdir.clean - assert func(args) is None - - # Write fake workdir.cfg - cfg_fake = "[chroot-init-dates]\nnative = 1337\n\n" - with open(args.work + "/workdir.cfg", "w") as handle: - handle.write(cfg_fake) - - # 1. chroot_native dir exists - os.makedirs(args.work + "/chroot_native") - assert func(args) is False - - # workdir.cfg: unchanged - with open(args.work + "/workdir.cfg", "r") as handle: - assert handle.read() == cfg_fake - - # 2. chroot_native dir does not exist - os.rmdir(args.work + "/chroot_native") - assert func(args) is True - - # workdir.cfg: "native" entry removed - with open(args.work + "/workdir.cfg", "r") as handle: - assert handle.read() == "[chroot-init-dates]\n\n" diff --git a/test/test_envkernel.py b/test/test_envkernel.py deleted file mode 100644 index ff5e15ba..00000000 --- a/test/test_envkernel.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import sys -import pytest - -import pmb_test -import pmb_test.const -import pmb.aportgen -import pmb.aportgen.core -import pmb.build -import pmb.build.envkernel -import pmb.config -import pmb.helpers.logging - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_package_kernel_args(args): - args.packages = ["package-one", "package-two"] - with pytest.raises(RuntimeError) as e: - pmb.build.envkernel.package_kernel(args) - assert "--envkernel needs exactly one linux-* package as argument." 
in \ - str(e.value) - - -def test_find_kbuild_output_dir(): - # Test parsing an APKBUILD - pkgname = "linux-envkernel-test" - path = pmb_test.const.testdata + "/apkbuild/APKBUILD." + pkgname - function_body = pmb.parse.function_body(path, "package") - kbuild_out = pmb.build.envkernel.find_kbuild_output_dir(function_body) - assert kbuild_out == "build" - - # Test full function body - function_body = [ - " install -Dm644 \"$srcdir\"/build/arch/arm/boot/dt.img ", - " \"$pkgdir\"/boot/dt.img", - "", - " install -Dm644 \"$srcdir\"/build/arch/arm/boot/zImage-dtb ", - " \"$pkgdir\"/boot/vmlinuz-$_flavor", - "", - " install -D \"$srcdir\"/build/include/config/kernel.release ", - " \"$pkgdir\"/usr/share/kernel/$_flavor/kernel.release", - "", - " cd \"$srcdir\"/build", - " unset LDFLAGS", - "", - " make ARCH=\"$_carch\" CC=\"${CC:-gcc}\" ", - " KBUILD_BUILD_VERSION=\"$((pkgrel + 1))-Alpine\" ", - " INSTALL_MOD_PATH=\"$pkgdir\" modules_install", - ] - kbuild_out = pmb.build.envkernel.find_kbuild_output_dir(function_body) - assert kbuild_out == "build" - - # Test no kbuild out dir - function_body = [ - " install -Dm644 \"$srcdir\"/arch/arm/boot/zImage ", - " \"$pkgdir\"/boot/vmlinuz-$_flavor", - " install -D \"$srcdir\"/include/config/kernel.release ", - " \"$pkgdir\"/usr/share/kernel/$_flavor/kernel.release", - ] - kbuild_out = pmb.build.envkernel.find_kbuild_output_dir(function_body) - assert kbuild_out == "" - - # Test curly brackets around srcdir - function_body = [ - " install -Dm644 \"${srcdir}\"/build/arch/arm/boot/zImage ", - " \"$pkgdir\"/boot/vmlinuz-$_flavor", - " install -D \"${srcdir}\"/build/include/config/kernel.release ", - " \"$pkgdir\"/usr/share/kernel/$_flavor/kernel.release", - ] - kbuild_out = pmb.build.envkernel.find_kbuild_output_dir(function_body) - assert kbuild_out == "build" - - # Test multiple sub directories - function_body = [ - " install -Dm644 \"${srcdir}\"/sub/dir/arch/arm/boot/zImage-dtb ", - " \"$pkgdir\"/boot/vmlinuz-$_flavor", - " install -D \"${srcdir}\"/sub/dir/include/config/kernel.release ", - " \"$pkgdir\"/usr/share/kernel/$_flavor/kernel.release", - ] - kbuild_out = pmb.build.envkernel.find_kbuild_output_dir(function_body) - assert kbuild_out == "sub/dir" - - # Test no kbuild out dir found - function_body = [ - " install -Dm644 \"$srcdir\"/build/not/found/zImage-dtb ", - " \"$pkgdir\"/boot/vmlinuz-$_flavor", - " install -D \"$srcdir\"/not/found/kernel.release ", - " \"$pkgdir\"/usr/share/kernel/$_flavor/kernel.release", - ] - with pytest.raises(RuntimeError) as e: - kbuild_out = pmb.build.envkernel.find_kbuild_output_dir(function_body) - assert ("Couldn't find a kbuild out directory. Is your APKBUILD messed up?" - " If not, then consider adjusting the patterns in " - "pmb/build/envkernel.py to work with your APKBUILD, or submit an " - "issue.") in str(e.value) - - # Test multiple different kbuild out dirs - function_body = [ - " install -Dm644 \"$srcdir\"/build/arch/arm/boot/zImage-dtb ", - " \"$pkgdir\"/boot/vmlinuz-$_flavor", - " install -D \"$srcdir\"/include/config/kernel.release ", - " \"$pkgdir\"/usr/share/kernel/$_flavor/kernel.release", - ] - with pytest.raises(RuntimeError) as e: - kbuild_out = pmb.build.envkernel.find_kbuild_output_dir(function_body) - assert ("Multiple kbuild out directories found. Can you modify your " - "APKBUILD so it only has one output path? 
If you can't resolve it," - " please open an issue.") in str(e.value) diff --git a/test/test_file.py b/test/test_file.py deleted file mode 100644 index d59f1220..00000000 --- a/test/test_file.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import sys -import time -import pytest - -import pmb_test # noqa -import pmb.helpers.git -import pmb.helpers.logging -import pmb.parse.version - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_file_is_older_than(args, tmpdir): - # Create a file last modified 10s ago - tempfile = str(tmpdir) + "/test" - pmb.helpers.run.user(args, ["touch", tempfile]) - past = time.time() - 10 - os.utime(tempfile, (-1, past)) - - # Check the bounds - func = pmb.helpers.file.is_older_than - assert func(tempfile, 9) is True - assert func(tempfile, 10) is True - assert func(tempfile, 11) is False diff --git a/test/test_folder_size.py b/test/test_folder_size.py deleted file mode 100644 index d68975bc..00000000 --- a/test/test_folder_size.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import sys -import pytest - -import pmb_test # noqa -import pmb.helpers.logging -import pmb.helpers.other -import pmb.helpers.run - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.details_to_stdout = True - pmb.helpers.logging.init(args) - return args - - -def test_get_folder_size(args, tmpdir): - # Write five 200 KB files to tmpdir - tmpdir = str(tmpdir) - files = 5 - for i in range(files): - pmb.helpers.run.user(args, ["dd", "if=/dev/zero", "of=" + - tmpdir + "/" + str(i), "bs=1K", - "count=200", "conv=notrunc"]) - - # Check if the size is correct. 
Unfortunately, the `du` call - # in pmb.helpers.other.folder_size is not very accurate, so we - # allow 30kb of tolerance (good enough for our use case): #760 #1717 - tolerance = 30 - size = 200 * files - result = pmb.helpers.other.folder_size(args, tmpdir) - assert (result < size + tolerance and result > size - tolerance) diff --git a/test/test_frontend.py b/test/test_frontend.py deleted file mode 100644 index 73f651fe..00000000 --- a/test/test_frontend.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import sys -import pytest - -import pmb_test # noqa -import pmb.config -import pmb.parse -import pmb.helpers.frontend -import pmb.helpers.logging - - -def test_build_src_invalid_path(): - sys.argv = ["pmbootstrap.py", "build", "--src=/invalidpath", "hello-world"] - args = pmb.parse.arguments() - - with pytest.raises(RuntimeError) as e: - pmb.helpers.frontend.build(args) - assert str(e.value).startswith("Invalid path specified for --src:") diff --git a/test/test_helpers_git.py b/test/test_helpers_git.py deleted file mode 100644 index 3f5488d3..00000000 --- a/test/test_helpers_git.py +++ /dev/null @@ -1,194 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import sys -import pytest -import shutil -import time - -import pmb_test # noqa -import pmb_test.const -import pmb_test.git -import pmb.helpers.git -import pmb.helpers.logging -import pmb.helpers.run - - -@pytest.fixture -def args(request): - import pmb.parse - cfg = f"{pmb_test.const.testdata}/channels.cfg" - sys.argv = ["pmbootstrap.py", "--config-channels", cfg, "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_get_path(args): - func = pmb.helpers.git.get_path - args.work = "/wrk" - args.aports = "/tmp/pmaports" - - assert func(args, "aports_upstream") == "/wrk/cache_git/aports_upstream" - assert func(args, "pmaports") == "/tmp/pmaports" - - -def test_can_fast_forward(args, tmpdir): - tmpdir = str(tmpdir) - func = pmb.helpers.git.can_fast_forward - branch_origin = "fake-branch-origin" - - def run_git(git_args): - pmb.helpers.run.user(args, ["git"] + git_args, tmpdir, "stdout") - - # Create test git repo - run_git(["init", "-b", "master", "."]) - run_git(["commit", "--allow-empty", "-m", "commit on master"]) - run_git(["checkout", "-b", branch_origin]) - run_git(["commit", "--allow-empty", "-m", "commit on branch_origin"]) - run_git(["checkout", "master"]) - - # Can fast-forward - assert func(args, tmpdir, branch_origin) is True - - # Can't fast-forward - run_git(["commit", "--allow-empty", "-m", "commit on master #2"]) - assert func(args, tmpdir, branch_origin) is False - - # Git command fails - with pytest.raises(RuntimeError) as e: - func(args, tmpdir, "invalid-branch") - assert str(e.value).startswith("Unexpected exit code") - - -def test_clean_worktree(args, tmpdir): - tmpdir = str(tmpdir) - func = pmb.helpers.git.clean_worktree - - def run_git(git_args): - pmb.helpers.run.user(args, ["git"] + git_args, tmpdir, "stdout") - - # Create test git repo - run_git(["init", "-b", "master", "."]) - run_git(["commit", "--allow-empty", "-m", "commit on master"]) - - assert func(args, tmpdir) is True - pmb.helpers.run.user(args, ["touch", "test"], tmpdir) - assert func(args, tmpdir) is False - - -def test_get_upstream_remote(args, monkeypatch, tmpdir): - tmpdir = str(tmpdir) - func = 
pmb.helpers.git.get_upstream_remote - name_repo = "test" - - # Override get_path() - def get_path(args, name_repo): - return tmpdir - monkeypatch.setattr(pmb.helpers.git, "get_path", get_path) - - # Override pmb.config.git_repos - url = "https://postmarketos.org/get-upstream-remote-test.git" - git_repos = {"test": url} - monkeypatch.setattr(pmb.config, "git_repos", git_repos) - - def run_git(git_args): - pmb.helpers.run.user(args, ["git"] + git_args, tmpdir, "stdout") - - # Create git repo - run_git(["init", "-b", "master", "."]) - run_git(["commit", "--allow-empty", "-m", "commit on master"]) - - # No upstream remote - with pytest.raises(RuntimeError) as e: - func(args, name_repo) - assert "could not find remote name for URL" in str(e.value) - - run_git(["remote", "add", "hello", url]) - assert func(args, name_repo) == "hello" - - -def test_parse_channels_cfg(args): - exp = {"meta": {"recommended": "edge"}, - "channels": {"edge": {"description": "Rolling release channel", - "branch_pmaports": "master", - "branch_aports": "master", - "mirrordir_alpine": "edge"}, - "v20.05": {"description": "For workgroups", - "branch_pmaports": "v20.05", - "branch_aports": "3.11-stable", - "mirrordir_alpine": "v3.11"}, - "v21.03": {"description": "Second beta release", - "branch_pmaports": "v21.03", - "branch_aports": "3.13-stable", - "mirrordir_alpine": "v3.13"}}} - assert pmb.helpers.git.parse_channels_cfg(args) == exp - - -def test_pull_non_existing(args): - assert pmb.helpers.git.pull(args, "non-existing-repo-name") == 1 - - -def test_pull(args, monkeypatch, tmpdir): - """ Test pmb.helpers.git.pull """ - path, run_git = pmb_test.git.prepare_tmpdir(args, monkeypatch, tmpdir) - - # Not on official branch - func = pmb.helpers.git.pull - name_repo = "test" - run_git(["checkout", "-b", "inofficial-branch"]) - assert func(args, name_repo) == -1 - - # Workdir is not clean - run_git(["checkout", "master"]) - shutil.copy(__file__, path + "/test.py") - assert func(args, name_repo) == -2 - os.unlink(path + "/test.py") - - # Tracking different remote - assert func(args, name_repo) == -3 - - # Let master track origin/master - run_git(["checkout", "-b", "temp"]) - run_git(["branch", "-D", "master"]) - run_git(["checkout", "-b", "master", "--track", "origin/master"]) - - # Already up to date - assert func(args, name_repo) == 2 - - # Can't fast-forward - run_git(["commit", "--allow-empty", "-m", "test"]) - assert func(args, name_repo) == -4 - - # Fast-forward successfully - run_git(["reset", "--hard", "origin/master"]) - run_git(["commit", "--allow-empty", "-m", "new"], "remote") - assert func(args, name_repo) == 0 - - -def test_is_outdated(tmpdir, monkeypatch): - func = pmb.helpers.git.is_outdated - - # Override time.time(): now is "100" - def fake_time(): - return 100.0 - monkeypatch.setattr(time, "time", fake_time) - - # Create .git/FETCH_HEAD - path = str(tmpdir) - os.mkdir(path + "/.git") - fetch_head = path + "/.git/FETCH_HEAD" - open(fetch_head, "w").close() - - # Set mtime to 90 - os.utime(fetch_head, times=(0, 90)) - - # Outdated (date_outdated: 90) - monkeypatch.setattr(pmb.config, "git_repo_outdated", 10) - assert func(path) is True - - # Not outdated (date_outdated: 89) - monkeypatch.setattr(pmb.config, "git_repo_outdated", 11) - assert func(path) is False diff --git a/test/test_helpers_lint.py b/test/test_helpers_lint.py deleted file mode 100644 index 8f110519..00000000 --- a/test/test_helpers_lint.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: 
GPL-3.0-or-later -import os -import pytest -import shutil -import sys - -import pmb_test -import pmb_test.const -import pmb.helpers.lint -import pmb.helpers.run - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap", "lint"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_pmbootstrap_lint(args, tmpdir): - args.aports = tmpdir = str(tmpdir) - - # Create hello-world pmaport in tmpdir - apkbuild_orig = f"{pmb_test.const.testdata}/apkbuild/APKBUILD.lint" - apkbuild_tmp = f"{tmpdir}/hello-world/APKBUILD" - os.makedirs(f"{tmpdir}/hello-world") - shutil.copyfile(apkbuild_orig, apkbuild_tmp) - - # Lint passes - assert pmb.helpers.lint.check(args, ["hello-world"]) == "" - - # Change "pmb:cross-native" to non-existing "pmb:invalid-opt" - pmb.helpers.run.user(args, ["sed", "s/pmb:cross-native/pmb:invalid-opt/g", - "-i", apkbuild_tmp]) - - # Lint error - err_str = "invalid option 'pmb:invalid-opt'" - assert err_str in pmb.helpers.lint.check(args, ["hello-world"]) diff --git a/test/test_helpers_package.py b/test/test_helpers_package.py deleted file mode 100644 index f7317842..00000000 --- a/test/test_helpers_package.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import sys -import pytest - -import pmb_test # noqa -import pmb.helpers.logging -import pmb.helpers.package - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_helpers_package_get_pmaports_and_cache(args, monkeypatch): - """ Test pmb.helpers.package.get(): find in pmaports, use cached result """ - - # Fake APKBUILD data - def stub(args, pkgname, must_exist): - return {"arch": ["armv7"], - "depends": ["testdepend"], - "pkgname": "testpkgname", - "provides": ["testprovide"], - "options": [], - "checkdepends": [], - "subpackages": {}, - "makedepends": [], - "pkgver": "1.0", - "pkgrel": "1"} - monkeypatch.setattr(pmb.helpers.pmaports, "get", stub) - - package = {"arch": ["armv7"], - "depends": ["testdepend"], - "pkgname": "testpkgname", - "provides": ["testprovide"], - "version": "1.0-r1"} - func = pmb.helpers.package.get - assert func(args, "testpkgname", "armv7") == package - - # Cached result - monkeypatch.delattr(pmb.helpers.pmaports, "get") - assert func(args, "testpkgname", "armv7") == package - - -def test_helpers_package_get_apkindex(args, monkeypatch): - """ Test pmb.helpers.package.get(): find in apkindex """ - - # Fake APKINDEX data - fake_apkindex_data = {"arch": "armv7", - "depends": ["testdepend"], - "pkgname": "testpkgname", - "provides": ["testprovide"], - "version": "1.0-r1"} - - def stub(args, pkgname, arch, must_exist): - if arch != fake_apkindex_data["arch"]: - return None - return fake_apkindex_data - monkeypatch.setattr(pmb.parse.apkindex, "package", stub) - - # Given arch - package = {"arch": ["armv7"], - "depends": ["testdepend"], - "pkgname": "testpkgname", - "provides": ["testprovide"], - "version": "1.0-r1"} - func = pmb.helpers.package.get - assert func(args, "testpkgname", "armv7") == package - - # Other arch - assert func(args, "testpkgname", "x86_64") == package - - -def test_helpers_package_depends_recurse(args): - """ Test 
pmb.helpers.package.depends_recurse() """ - - # Put fake data into the pmb.helpers.package.get() cache - cache = {"a": {False: {"pkgname": "a", "depends": ["b", "c"]}}, - "b": {False: {"pkgname": "b", "depends": []}}, - "c": {False: {"pkgname": "c", "depends": ["d"]}}, - "d": {False: {"pkgname": "d", "depends": ["b"]}}} - pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = cache - - # Normal runs - func = pmb.helpers.package.depends_recurse - assert func(args, "a", "armhf") == ["a", "b", "c", "d"] - assert func(args, "d", "armhf") == ["b", "d"] - - # Cached result - pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = {} - assert func(args, "d", "armhf") == ["b", "d"] - - -def test_helpers_package_check_arch_package(args): - """ Test pmb.helpers.package.check_arch(): binary = True """ - # Put fake data into the pmb.helpers.package.get() cache - func = pmb.helpers.package.check_arch - cache = {"a": {False: {"arch": []}}} - pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = cache - - cache["a"][False]["arch"] = ["all !armhf"] - assert func(args, "a", "armhf") is False - - cache["a"][False]["arch"] = ["all"] - assert func(args, "a", "armhf") is True - - cache["a"][False]["arch"] = ["noarch"] - assert func(args, "a", "armhf") is True - - cache["a"][False]["arch"] = ["armhf"] - assert func(args, "a", "armhf") is True - - cache["a"][False]["arch"] = ["aarch64"] - assert func(args, "a", "armhf") is False - - -def test_helpers_package_check_arch_pmaports(args, monkeypatch): - """ Test pmb.helpers.package.check_arch(): binary = False """ - func = pmb.helpers.package.check_arch - fake_pmaport = {"arch": []} - - def fake_pmaports_get(args, pkgname, must_exist=False): - return fake_pmaport - monkeypatch.setattr(pmb.helpers.pmaports, "get", fake_pmaports_get) - - fake_pmaport["arch"] = ["armhf"] - assert func(args, "a", "armhf", False) is True - - fake_pmaport["arch"] = ["all", "!armhf"] - assert func(args, "a", "armhf", False) is False diff --git a/test/test_helpers_pmaports.py b/test/test_helpers_pmaports.py deleted file mode 100644 index a22ad5c0..00000000 --- a/test/test_helpers_pmaports.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import os -import pytest -import sys - -import pmb_test # noqa -import pmb.build.other - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_guess_main(args, tmpdir): - # Fake pmaports folder - tmpdir = str(tmpdir) - args.aports = tmpdir - for aport in ["temp/qemu", "main/some-pkg"]: - os.makedirs(tmpdir + "/" + aport) - with open(tmpdir + "/" + aport + "/APKBUILD", 'w'): - pass - - func = pmb.helpers.pmaports.guess_main - assert func(args, "qemu-x86_64") == tmpdir + "/temp/qemu" - assert func(args, "qemu-system-x86_64") == tmpdir + "/temp/qemu" - assert func(args, "some-pkg-sub-pkg") == tmpdir + "/main/some-pkg" - assert func(args, "qemuPackageWithoutDashes") is None - - -def test_guess_main_dev(args, tmpdir): - # Fake pmaports folder - tmpdir = str(tmpdir) - args.aports = tmpdir - os.makedirs(tmpdir + "/temp/plasma") - with open(tmpdir + "/temp/plasma/APKBUILD", 'w'): - pass - - func = pmb.helpers.pmaports.guess_main_dev - assert func(args, "plasma-framework-dev") is None - assert func(args, "plasma-dev") == tmpdir + "/temp/plasma" - - func = 
pmb.helpers.pmaports.guess_main - assert func(args, "plasma-framework-dev") is None - assert func(args, "plasma-randomsubpkg") == tmpdir + "/temp/plasma" diff --git a/test/test_helpers_repo.py b/test/test_helpers_repo.py deleted file mode 100644 index 0b180ad2..00000000 --- a/test/test_helpers_repo.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb.helpers.repo """ -import pytest -import sys - -import pmb_test # noqa -import pmb_test.const -import pmb.helpers.repo - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - cfg = f"{pmb_test.const.testdata}/channels.cfg" - sys.argv = ["pmbootstrap.py", "--config-channels", cfg, "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_hash(): - url = "https://nl.alpinelinux.org/alpine/edge/testing" - hash = "865a153c" - assert pmb.helpers.repo.hash(url, 8) == hash - - -def test_alpine_apkindex_path(args): - func = pmb.helpers.repo.alpine_apkindex_path - args.mirror_alpine = "http://dl-cdn.alpinelinux.org/alpine/" - ret = args.work + "/cache_apk_armhf/APKINDEX.30e6f5af.tar.gz" - assert func(args, "testing", "armhf") == ret - - -def test_urls(args, monkeypatch): - func = pmb.helpers.repo.urls - channel = "v20.05" - args.mirror_alpine = "http://localhost/alpine/" - - # Second mirror with /master at the end is legacy, gets fixed by func. - # Note that bpo uses multiple postmarketOS mirrors at the same time, so it - # can use its WIP repository together with the final repository. - args.mirrors_postmarketos = ["http://localhost/pmos1/", - "http://localhost/pmos2/master"] - - # Pretend to have a certain channel in pmaports.cfg - def read_config(args): - return {"channel": channel} - monkeypatch.setattr(pmb.config.pmaports, "read_config", read_config) - - # Channel: v20.05 - assert func(args) == ["/mnt/pmbootstrap/packages", - "http://localhost/pmos1/v20.05", - "http://localhost/pmos2/v20.05", - "http://localhost/alpine/v3.11/main", - "http://localhost/alpine/v3.11/community"] - - # Channel: edge (has Alpine's testing) - channel = "edge" - assert func(args) == ["/mnt/pmbootstrap/packages", - "http://localhost/pmos1/master", - "http://localhost/pmos2/master", - "http://localhost/alpine/edge/main", - "http://localhost/alpine/edge/community", - "http://localhost/alpine/edge/testing"] - - # Only Alpine's URLs - exp = ["http://localhost/alpine/edge/main", - "http://localhost/alpine/edge/community", - "http://localhost/alpine/edge/testing"] - assert func(args, False, False) == exp diff --git a/test/test_helpers_repo_missing.py b/test/test_helpers_repo_missing.py deleted file mode 100644 index 0b457279..00000000 --- a/test/test_helpers_repo_missing.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pytest -import sys - -import pmb_test # noqa -import pmb.build.other - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_filter_missing_packages_invalid(args): - """ Test ...repo_missing.filter_missing_packages(): invalid package """ - func = pmb.helpers.repo_missing.filter_missing_packages - with pytest.raises(RuntimeError) as e: - 
func(args, "armhf", ["invalid-package-name"]) - assert str(e.value).startswith("Could not find aport") - - -def test_filter_missing_packages_binary_exists(args): - """ Test ...repo_missing.filter_missing_packages(): binary exists """ - func = pmb.helpers.repo_missing.filter_missing_packages - assert func(args, "armhf", ["busybox"]) == [] - - -def test_filter_missing_packages_pmaports(args, monkeypatch): - """ Test ...repo_missing.filter_missing_packages(): pmaports """ - build_is_necessary = None - func = pmb.helpers.repo_missing.filter_missing_packages - - def stub(args, arch, pmaport): - return build_is_necessary - monkeypatch.setattr(pmb.build, "is_necessary", stub) - - build_is_necessary = True - assert func(args, "x86_64", ["busybox", "hello-world"]) == ["hello-world"] - - build_is_necessary = False - assert func(args, "x86_64", ["busybox", "hello-world"]) == [] - - -def test_filter_aport_packages(args): - """ Test ...repo_missing.filter_aport_packages() """ - func = pmb.helpers.repo_missing.filter_aport_packages - assert func(args, "armhf", ["busybox", "hello-world"]) == ["hello-world"] - - -def test_filter_arch_packages(args, monkeypatch): - """ Test ...repo_missing.filter_arch_packages() """ - func = pmb.helpers.repo_missing.filter_arch_packages - check_arch = None - - def stub(args, arch, pmaport, binary=True): - return check_arch - monkeypatch.setattr(pmb.helpers.package, "check_arch", stub) - - check_arch = False - assert func(args, "armhf", ["hello-world"]) == [] - - check_arch = True - assert func(args, "armhf", []) == [] - - -def test_get_relevant_packages(args, monkeypatch): - """ Test ...repo_missing.get_relevant_packages() """ - - # Set up fake return values - stub_data = {"check_arch": False, - "depends_recurse": ["a", "b", "c", "d"], - "filter_arch_packages": ["a", "b", "c"], - "filter_aport_packages": ["b", "a"], - "filter_missing_packages": ["a"]} - - def stub(args, arch, pmaport, binary=True): - return stub_data["check_arch"] - monkeypatch.setattr(pmb.helpers.package, "check_arch", stub) - - def stub(args, arch, pmaport): - return stub_data["depends_recurse"] - monkeypatch.setattr(pmb.helpers.package, "depends_recurse", stub) - - def stub(args, arch, pmaport): - return stub_data["filter_arch_packages"] - monkeypatch.setattr(pmb.helpers.repo_missing, "filter_arch_packages", stub) - - def stub(args, arch, pmaport): - return stub_data["filter_aport_packages"] - monkeypatch.setattr(pmb.helpers.repo_missing, "filter_aport_packages", - stub) - - def stub(args, arch, pmaport): - return stub_data["filter_missing_packages"] - monkeypatch.setattr(pmb.helpers.repo_missing, "filter_missing_packages", - stub) - - # No given package - func = pmb.helpers.repo_missing.get_relevant_packages - assert func(args, "armhf") == ["a"] - assert func(args, "armhf", built=True) == ["a", "b"] - - # Package can't be built for given arch - with pytest.raises(RuntimeError) as e: - func(args, "armhf", "a") - assert "can't be built" in str(e.value) - - # Package can be built for given arch - stub_data["check_arch"] = True - assert func(args, "armhf", "a") == ["a"] - assert func(args, "armhf", "a", True) == ["a", "b"] - - -def test_generate_output_format(args, monkeypatch): - """ Test ...repo_missing.generate_output_format() """ - - def stub(args, pkgname, arch, replace_subpkgnames=False): - return {"pkgname": "hello-world", "version": "1.0-r0", - "depends": ["depend1", "depend2"]} - monkeypatch.setattr(pmb.helpers.package, "get", stub) - - def stub(args, pkgname): - return "main" - 
monkeypatch.setattr(pmb.helpers.pmaports, "get_repo", stub) - - func = pmb.helpers.repo_missing.generate_output_format - ret = [{"pkgname": "hello-world", - "repo": "main", - "version": "1.0-r0", - "depends": ["depend1", "depend2"]}] - assert func(args, "armhf", ["hello-world"]) == ret diff --git a/test/test_helpers_status.py b/test/test_helpers_status.py deleted file mode 100644 index 7dbd95f4..00000000 --- a/test/test_helpers_status.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb/helpers/status.py """ -import os -import pytest -import shutil -import sys - -import pmb_test -import pmb_test.git -import pmb.config -import pmb.config.workdir - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_pmbootstrap_status(args, tmpdir): - """ High level testing of 'pmbootstrap status': run it twice, once with - a fine workdir, and once where one check is failing. """ - # Prepare empty workdir - work = str(tmpdir) - with open(work + "/version", "w") as handle: - handle.write(str(pmb.config.work_version)) - - # "pmbootstrap status" succeeds (pmb.helpers.run.user verifies exit 0) - pmbootstrap = pmb.config.pmb_src + "/pmbootstrap.py" - pmb.helpers.run.user(args, [pmbootstrap, "-w", work, "status", - "--details"]) - - # Mark chroot_native as outdated - with open(work + "/workdir.cfg", "w") as handle: - handle.write("[chroot-init-dates]\nnative = 1234\n") - - # "pmbootstrap status" fails - ret = pmb.helpers.run.user(args, [pmbootstrap, "-w", work, "status"], - check=False) - assert ret == 1 - - -def test_print_checks_git_repo(args, monkeypatch, tmpdir): - """ Test pmb.helpers.status.print_checks_git_repo """ - path, run_git = pmb_test.git.prepare_tmpdir(args, monkeypatch, tmpdir) - - # Not on official branch - func = pmb.helpers.status.print_checks_git_repo - name_repo = "test" - run_git(["checkout", "-b", "inofficial-branch"]) - status, _ = func(args, name_repo) - assert status == -1 - - # Workdir is not clean - run_git(["checkout", "master"]) - shutil.copy(__file__, path + "/test.py") - status, _ = func(args, name_repo) - assert status == -2 - os.unlink(path + "/test.py") - - # Tracking different remote - status, _ = func(args, name_repo) - assert status == -3 - - # Let master track origin/master - run_git(["checkout", "-b", "temp"]) - run_git(["branch", "-D", "master"]) - run_git(["checkout", "-b", "master", "--track", "origin/master"]) - - # Not up to date - run_git(["commit", "--allow-empty", "-m", "new"], "remote") - run_git(["fetch"]) - status, _ = func(args, name_repo) - assert status == -4 - - # Up to date - run_git(["pull"]) - status, _ = func(args, name_repo) - assert status == 0 - - # Outdated remote information - def is_outdated(path): - return True - monkeypatch.setattr(pmb.helpers.git, "is_outdated", is_outdated) - status, _ = func(args, name_repo) - assert status == -5 diff --git a/test/test_helpers_ui.py b/test/test_helpers_ui.py deleted file mode 100644 index f803e305..00000000 --- a/test/test_helpers_ui.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pytest -import sys - -import pmb_test -import pmb_test.const -import pmb.helpers.logging -import pmb.helpers.ui - - -@pytest.fixture -def args(tmpdir, request): - 
import pmb.parse - cfg = f"{pmb_test.const.testdata}/channels.cfg" - sys.argv = ["pmbootstrap.py", "--config-channels", cfg, "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_helpers_ui(args): - """ Test the UIs returned by pmb.helpers.ui.list() with a testdata pmaports - dir. That test dir has a plasma-mobile UI, which is disabled for armhf, - so it must not be returned when querying the UI list for armhf. """ - args.aports = f"{pmb_test.const.testdata}/helpers_ui/pmaports" - func = pmb.helpers.ui.list - none_desc = "Bare minimum OS image for testing and manual" \ - " customization. The \"console\" UI should be selected if" \ - " a graphical UI is not desired." - assert func(args, "armhf") == [("none", none_desc)] - assert func(args, "x86_64") == [("none", none_desc), - ("plasma-mobile", "cool pkgdesc")] diff --git a/test/test_install.py b/test/test_install.py deleted file mode 100644 index 7b379107..00000000 --- a/test/test_install.py +++ /dev/null @@ -1,178 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pytest -import sys -import os -import shutil - -import pmb_test -import pmb_test.const -import pmb.aportgen.device -import pmb.config -import pmb.config.init -import pmb.helpers.logging -import pmb.install._install - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_get_nonfree_packages(args): - args.aports = pmb_test.const.testdata + "/init_questions_device/aports" - func = pmb.install._install.get_nonfree_packages - - # Device without any non-free subpackages - args.nonfree_firmware = True - args.nonfree_userland = True - assert func(args, "lg-mako") == [] - - # Device with non-free firmware and userland - device = "nonfree-firmware-and-userland" - assert func(args, device) == ["device-" + device + "-nonfree-firmware", - "device-" + device + "-nonfree-userland"] - - # Device with non-free userland - device = "nonfree-userland" - assert func(args, device) == ["device-" + device + "-nonfree-userland"] - - # Device with non-free userland (but user disabled it init) - args.nonfree_userland = False - assert func(args, device) == [] - - -def test_get_recommends(args): - args.aports = pmb_test.const.testdata + "/pmb_recommends" - func = pmb.install._install.get_recommends - - # UI: none - args.install_recommends = True - assert func(args, ["postmarketos-ui-none"]) == [] - - # UI: test, --no-recommends - args.install_recommends = False - assert func(args, ["postmarketos-ui-test"]) == [] - - # UI: test - args.install_recommends = True - assert func(args, ["postmarketos-ui-test"]) == ["plasma-camera", - "plasma-angelfish"] - - # UI: test + test-extras - args.install_recommends = True - assert func(args, ["postmarketos-ui-test", - "postmarketos-ui-test-extras"]) == ["plasma-camera", - "plasma-angelfish", - "buho", "kaidan", - "test-app", "foot", - "htop"] - # Non-UI package - args.install_recommends = True - args.ui_extras = False - assert func(args, ["test-app"]) == ["foot", "htop"] - - -def test_get_groups(args): - args.aports = f"{pmb_test.const.testdata}/pmb_groups" - func = pmb.install.ui.get_groups - - # UI: none: - args.ui = "none" - assert func(args) 
== [] - - # UI: test, without -extras - args.ui = "test" - args.ui_extras = False - assert func(args) == ["feedbackd"] - - # UI: test, with -extras - args.ui = "test" - args.ui_extras = True - assert func(args) == ["feedbackd", "extra"] - - # UI: invalid - args.ui = "invalid" - with pytest.raises(RuntimeError) as e: - func(args) - assert str(e.value).startswith("Could not find aport for package") - - -def test_generate_binary_list(args): - suffix = "mysuffix" - args.work = "/tmp" - func = pmb.install._install.generate_binary_list - binary_dir = os.path.join(args.work, f"chroot_{suffix}", "usr/share") - os.makedirs(binary_dir, exist_ok=True) - step = 1024 - binaries = [f"{pmb_test.const.testdata}/pmb_install/small.bin", - f"{pmb_test.const.testdata}/pmb_install/full.bin", - f"{pmb_test.const.testdata}/pmb_install/big.bin", - f"{pmb_test.const.testdata}/pmb_install/overrun.bin", - f"{pmb_test.const.testdata}/pmb_install/binary2.bin"] - for b in binaries: - shutil.copy(b, binary_dir) - - # Binary that is small enough to fit the partition of 10 blocks - # of 512 bytes each - binaries = "small.bin:1,binary2.bin:11" - args.deviceinfo = {"sd_embed_firmware": binaries, - "boot_part_start": "128"} - assert func(args, suffix, step) == [('small.bin', 1), ('binary2.bin', 11)] - - # Binary that is fully filling the partition of 10 blocks of 512 bytes each - binaries = "full.bin:1,binary2.bin:11" - args.deviceinfo = {"sd_embed_firmware": binaries, - "boot_part_start": "128"} - assert func(args, suffix, step) == [('full.bin', 1), ('binary2.bin', 11)] - - # Binary that is too big to fit the partition of 10 blocks - # of 512 bytes each - binaries = "big.bin:1,binary2.bin:2" - args.deviceinfo = {"sd_embed_firmware": binaries, - "boot_part_start": "128"} - with pytest.raises(RuntimeError) as e: - func(args, suffix, step) - assert str(e.value).startswith("The firmware overlaps with at least one") - - # Binary that overruns the first partition - binaries = "overrun.bin:1" - args.deviceinfo = {"sd_embed_firmware": binaries, - "boot_part_start": "1"} - with pytest.raises(RuntimeError) as e: - func(args, suffix, step) - assert str(e.value).startswith("The firmware is too big to embed in") - - # Binary does not exist - binaries = "does-not-exist.bin:1,binary2.bin:11" - args.deviceinfo = {"sd_embed_firmware": binaries, - "boot_part_start": "128"} - with pytest.raises(RuntimeError) as e: - func(args, suffix, step) - assert str(e.value).startswith("The following firmware binary does not") - - # Binaries are touching but not overlapping - # boot_part_start is at 2 sectors (1024 b) - # |-----|---------------------|-------------------|------------------- - # | … | binary2.bin (100 b) | small.bin (600 b) | /boot part start … - # |-----|---------------------|-------------------|------------------- - # 0 324 424 1024 - step = 1 - binaries = "binary2.bin:324,small.bin:424" - args.deviceinfo = {"sd_embed_firmware": binaries, - "boot_part_start": "2"} - assert func(args, suffix, step) == [('binary2.bin', 324), - ('small.bin', 424)] - - # Same layout written with different order in sd_embed_firmware - binaries = "small.bin:424,binary2.bin:324" - args.deviceinfo = {"sd_embed_firmware": binaries, - "boot_part_start": "2"} - assert func(args, suffix, step) == [('small.bin', 424), - ('binary2.bin', 324)] diff --git a/test/test_mount.py b/test/test_mount.py deleted file mode 100644 index 502a3473..00000000 --- a/test/test_mount.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: 
GPL-3.0-or-later -import pmb_test # noqa -import pmb.helpers.mount - - -def test_umount_all_list(tmpdir): - # Write fake mounts file - fake_mounts = str(tmpdir + "/mounts") - with open(fake_mounts, "w") as handle: - handle.write("source /test/var/cache\n") - handle.write("source /test/home/pmos/packages\n") - handle.write("source /test\n") - handle.write("source /test/proc\n") - handle.write("source /test/dev/loop0p2\\040(deleted)\n") - - ret = pmb.helpers.mount.umount_all_list("/no/match", fake_mounts) - assert ret == [] - - ret = pmb.helpers.mount.umount_all_list("/test/var/cache", fake_mounts) - assert ret == ["/test/var/cache"] - - ret = pmb.helpers.mount.umount_all_list("/test", fake_mounts) - assert ret == ["/test/var/cache", "/test/proc", "/test/home/pmos/packages", - "/test/dev/loop0p2", "/test"] diff --git a/test/test_newapkbuild.py b/test/test_newapkbuild.py deleted file mode 100644 index 07256f91..00000000 --- a/test/test_newapkbuild.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import glob -import os -import pytest -import shutil -import sys - -import pmb_test # noqa -import pmb_test.const -import pmb.build.newapkbuild -import pmb.config -import pmb.config.init -import pmb.helpers.logging - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - cfg = f"{pmb_test.const.testdata}/channels.cfg" - sys.argv = ["pmbootstrap.py", "--config-channels", cfg, "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_newapkbuild(args, monkeypatch, tmpdir): - testdata = pmb_test.const.testdata - - # Fake functions - def confirm_true(*nargs): - return True - - def confirm_false(*nargs): - return False - - # Preparation - monkeypatch.setattr(pmb.helpers.cli, "confirm", confirm_false) - pmb.build.init(args) - args.aports = tmpdir = str(tmpdir) - shutil.copy(f"{testdata}/pmaports.cfg", args.aports) - func = pmb.build.newapkbuild - - # Show the help - func(args, "main", ["-h"]) - assert glob.glob(f"{tmpdir}/*") == [f"{tmpdir}/pmaports.cfg"] - - # Test package - pkgname = "testpackage" - func(args, "main", [pkgname]) - apkbuild_path = tmpdir + "/main/" + pkgname + "/APKBUILD" - apkbuild = pmb.parse.apkbuild(apkbuild_path) - assert apkbuild["pkgname"] == pkgname - assert apkbuild["pkgdesc"] == "" - - # Don't overwrite - with pytest.raises(RuntimeError) as e: - func(args, "main", [pkgname]) - assert "Aborted" in str(e.value) - - # Overwrite - monkeypatch.setattr(pmb.helpers.cli, "confirm", confirm_true) - pkgdesc = "testdescription" - func(args, "main", ["-d", pkgdesc, pkgname]) - pmb.helpers.other.cache["apkbuild"] = {} - apkbuild = pmb.parse.apkbuild(apkbuild_path) - assert apkbuild["pkgname"] == pkgname - assert apkbuild["pkgdesc"] == pkgdesc - - # There should be no src folder - assert not os.path.exists(tmpdir + "/main/" + pkgname + "/src") diff --git a/test/test_parse_apkbuild.py b/test/test_parse_apkbuild.py deleted file mode 100644 index 2b91ca12..00000000 --- a/test/test_parse_apkbuild.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pytest -import sys - -import pmb_test -import pmb_test.const -import pmb.parse._apkbuild - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" 
- pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_subpackages(): - testdata = pmb_test.const.testdata - path = testdata + "/apkbuild/APKBUILD.subpackages" - apkbuild = pmb.parse.apkbuild(path, check_pkgname=False) - - subpkg = apkbuild["subpackages"]["simple"] - assert subpkg["pkgdesc"] == "" - # Inherited from parent package - assert subpkg["depends"] == ["postmarketos-base"] - - subpkg = apkbuild["subpackages"]["custom"] - assert subpkg["pkgdesc"] == "This is one of the custom subpackages" - assert subpkg["depends"] == ["postmarketos-base", "glibc"] - - # Successful extraction - path = (testdata + "/init_questions_device/aports/device/testing/" - "device-nonfree-firmware/APKBUILD") - apkbuild = pmb.parse.apkbuild(path) - subpkg = (apkbuild["subpackages"] - ["device-nonfree-firmware-nonfree-firmware"]) - assert subpkg["pkgdesc"] == "firmware description" - - # Can't find the pkgdesc in the function - path = testdata + "/apkbuild/APKBUILD.missing-pkgdesc-in-subpackage" - apkbuild = pmb.parse.apkbuild(path, check_pkgname=False) - subpkg = (apkbuild["subpackages"] - ["missing-pkgdesc-in-subpackage-subpackage"]) - assert subpkg["pkgdesc"] == "" - - # Can't find the function - assert apkbuild["subpackages"]["invalid-function"] is None - - -def test_kernels(args): - # Kernel hardcoded in depends - args.aports = pmb_test.const.testdata + "/init_questions_device/aports" - func = pmb.parse._apkbuild.kernels - device = "lg-mako" - assert func(args, device) is None - - # Upstream and downstream kernel - device = "sony-amami" - ret = {"downstream": "Downstream description", - "mainline": "Mainline description"} - assert func(args, device) == ret - - # Long kernel name (e.g. two different mainline kernels) - device = "wileyfox-crackling" - ret = {"mainline": "Mainline kernel (no modem)", - "mainline-modem": "Mainline kernel (with modem)", - "downstream": "Downstream kernel"} - assert func(args, device) == ret - - -def test_depends_in_depends(): - path = pmb_test.const.testdata + "/apkbuild/APKBUILD.depends-in-depends" - apkbuild = pmb.parse.apkbuild(path, check_pkgname=False) - assert apkbuild["depends"] == ["first", "second", "third"] - - -def test_parse_attributes(): - # Convenience function for calling the function with a block of text - def func(attribute, block): - lines = block.split("\n") - for i in range(0, len(lines)): - lines[i] += "\n" - i = 0 - path = "(testcase in " + __file__ + ")" - print("=== parsing attribute '" + attribute + "' in test block:") - print(block) - print("===") - return pmb.parse._apkbuild.parse_attribute(attribute, lines, i, path) - - assert func("depends", "pkgname='test'") == (False, None, 0) - - assert func("pkgname", 'pkgname="test"') == (True, "test", 0) - - assert func("pkgname", "pkgname='test'") == (True, "test", 0) - - assert func("pkgname", "pkgname=test") == (True, "test", 0) - - assert func("pkgname", 'pkgname="test\n"') == (True, "test", 1) - - assert func("pkgname", 'pkgname="\ntest\n"') == (True, "test", 2) - - assert func("pkgname", 'pkgname="test" # random comment\npkgrel=3') == \ - (True, "test", 0) - - assert func("pkgver", 'pkgver=2.37 # random comment\npkgrel=3') == \ - (True, "2.37", 0) - - assert func("depends", "depends='\nfirst\nsecond\nthird\n'#") == \ - (True, "first second third", 4) - - assert func("depends", 'depends="\nfirst\n\tsecond third"') == \ - (True, "first second third", 2) - - assert func("depends", 'depends=') == (True, "", 0) - - with pytest.raises(RuntimeError) 
as e: - func("depends", 'depends="\nmissing\nend\nquote\nsign') - assert str(e.value).startswith("Can't find closing") - - with pytest.raises(RuntimeError) as e: - func("depends", 'depends="') - assert str(e.value).startswith("Can't find closing") - - -def test_variable_replacements(): - path = pmb_test.const.testdata + "/apkbuild/APKBUILD.variable-replacements" - apkbuild = pmb.parse.apkbuild(path, check_pkgname=False) - assert apkbuild["pkgdesc"] == "this should not affect variable replacement" - assert apkbuild["url"] == "replacements variable string-replacements" - assert list(apkbuild["subpackages"].keys()) == ["replacements", "test"] - - assert apkbuild["subpackages"]["replacements"] is None - test_subpkg = apkbuild["subpackages"]["test"] - assert test_subpkg["pkgdesc"] == ("this should not affect variable " - "replacement") - - -def test_parse_maintainers(): - path = pmb_test.const.testdata + "/apkbuild/APKBUILD.lint" - maintainers = [ - "Oliver Smith ", - "Hello World " - ] - - assert pmb.parse._apkbuild.maintainers(path) == maintainers - - -def test_parse_unmaintained(): - path = (f"{pmb_test.const.testdata}/apkbuild" - "/APKBUILD.missing-pkgdesc-in-subpackage") - assert pmb.parse._apkbuild.unmaintained(path) == "This is broken!" - - -def test_weird_pkgver(): - path = (f"{pmb_test.const.testdata}/apkbuild" - "/APKBUILD.weird-pkgver") - apkbuild = pmb.parse.apkbuild(path, check_pkgname=False, check_pkgver=True) - assert apkbuild["pkgver"] == "3.0.0_alpha369-r0" diff --git a/test/test_parse_apkindex.py b/test/test_parse_apkindex.py deleted file mode 100644 index cb17be54..00000000 --- a/test/test_parse_apkindex.py +++ /dev/null @@ -1,395 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb.parse.apkindex """ -import collections -import os -import pytest -import sys - -import pmb_test # noqa -import pmb.parse.apkindex -import pmb.helpers.logging -import pmb.helpers.repo - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_parse_next_block_exceptions(): - # Mapping of input files (inside the /test/testdata/apkindex) to - # error message substrings - mapping = {"key_twice": "specified twice", - "key_missing": "Missing required key", - "new_line_missing": "does not end with a new line!"} - - # Parse the files - for file, error_substr in mapping.items(): - path = pmb.config.pmb_src + "/test/testdata/apkindex/" + file - with open(path, "r", encoding="utf-8") as handle: - lines = handle.readlines() - - with pytest.raises(RuntimeError) as e: - pmb.parse.apkindex.parse_next_block(path, lines, [0]) - assert error_substr in str(e.value) - - -def test_parse_next_block_no_error(): - # Read the file - func = pmb.parse.apkindex.parse_next_block - path = pmb.config.pmb_src + "/test/testdata/apkindex/no_error" - with open(path, "r", encoding="utf-8") as handle: - lines = handle.readlines() - - # First block - start = [0] - block = {'arch': 'x86_64', - 'depends': [], - 'origin': 'musl', - 'pkgname': 'musl', - 'provides': ['so:libc.musl-x86_64.so.1'], - 'timestamp': '1515217616', - 'version': '1.1.18-r5'} - assert func(path, lines, start) == block - assert start == [24] - - # Second block - block = {'arch': 'x86_64', - 'depends': ['ca-certificates', - 'so:libc.musl-x86_64.so.1', - 'so:libcurl.so.4', - 
'so:libz.so.1'], - 'origin': 'curl', - 'pkgname': 'curl', - 'provides': ['cmd:curl'], - 'timestamp': '1512030418', - 'version': '7.57.0-r0'} - assert func(path, lines, start) == block - assert start == [45] - - # No more blocks - assert func(path, lines, start) is None - assert start == [45] - - -def test_parse_next_block_virtual(): - """ - Test parsing a virtual package from an APKINDEX. - """ - # Read the file - func = pmb.parse.apkindex.parse_next_block - path = pmb.config.pmb_src + "/test/testdata/apkindex/virtual_package" - with open(path, "r", encoding="utf-8") as handle: - lines = handle.readlines() - - # First block - start = [0] - block = {'arch': 'x86_64', - 'depends': ['so:libc.musl-x86_64.so.1'], - 'origin': 'hello-world', - 'pkgname': 'hello-world', - 'provides': ['cmd:hello-world'], - 'timestamp': '1500000000', - 'version': '2-r0'} - assert func(path, lines, start) == block - assert start == [20] - - # Second block: virtual package - block = {'arch': 'noarch', - 'depends': ['hello-world'], - 'pkgname': '.pmbootstrap', - 'provides': [], - 'version': '0'} - assert func(path, lines, start) == block - assert start == [31] - - # No more blocks - assert func(path, lines, start) is None - assert start == [31] - - -def test_parse_next_block_conflict(): - """ - Test parsing a package that specifies a conflicting dependency from an - APKINDEX. - """ - # Read the file - func = pmb.parse.apkindex.parse_next_block - path = pmb.config.pmb_src + "/test/testdata/apkindex/conflict" - with open(path, "r", encoding="utf-8") as handle: - lines = handle.readlines() - - # First block - start = [0] - block = {'arch': 'x86_64', - 'depends': ['!conflict', 'so:libc.musl-x86_64.so.1'], - 'origin': 'hello-world', - 'pkgname': 'hello-world', - 'provides': ['cmd:hello-world'], - 'timestamp': '1500000000', - 'version': '2-r0'} - assert func(path, lines, start) == block - assert start == [20] - - # No more blocks - assert func(path, lines, start) is None - assert start == [20] - - -def test_parse_add_block(args): - func = pmb.parse.apkindex.parse_add_block - multiple_providers = False - - # One package without alias - ret = {} - block = {"pkgname": "test", "version": "2"} - alias = None - func(ret, block, alias, multiple_providers) - assert ret == {"test": block} - - # Older packages must not overwrite newer ones - block_old = {"pkgname": "test", "version": "1"} - func(ret, block_old, alias, multiple_providers) - assert ret == {"test": block} - - # Newer packages must overwrite older ones - block_new = {"pkgname": "test", "version": "3"} - func(ret, block_new, alias, multiple_providers) - assert ret == {"test": block_new} - - # Add package with alias - alias = "test_alias" - func(ret, block_new, alias, multiple_providers) - assert ret == {"test": block_new, "test_alias": block_new} - - -def test_parse_add_block_multiple_providers(args): - func = pmb.parse.apkindex.parse_add_block - - # One package without alias - ret = {} - block = {"pkgname": "test", "version": "2"} - alias = None - func(ret, block, alias) - assert ret == {"test": {"test": block}} - - # Older packages must not overwrite newer ones - block_old = {"pkgname": "test", "version": "1"} - func(ret, block_old, alias) - assert ret == {"test": {"test": block}} - - # Newer packages must overwrite older ones - block_new = {"pkgname": "test", "version": "3"} - func(ret, block_new, alias) - assert ret == {"test": {"test": block_new}} - - # Add package with alias - alias = "test_alias" - func(ret, block_new, alias) - assert ret == {"test": {"test": 
block_new}, - "test_alias": {"test": block_new}} - - # Add another package with the same alias - alias = "test_alias" - block_test2 = {"pkgname": "test2", "version": "1"} - func(ret, block_test2, alias) - assert ret == {"test": {"test": block_new}, - "test_alias": {"test": block_new, "test2": block_test2}} - - -def test_parse_invalid_path(): - assert pmb.parse.apkindex.parse("/invalid/path/APKINDEX") == {} - - -def test_parse_cached(args, tmpdir): - # Create a real file (cache looks at the last modified date) - path = str(tmpdir) + "/APKINDEX" - pmb.helpers.run.user(args, ["touch", path]) - lastmod = os.path.getmtime(path) - - # Fill the cache - pmb.helpers.other.cache["apkindex"][path] = { - "lastmod": lastmod, - "multiple": "cached_result_multiple", - "single": "cached_result_single", - } - - # Verify cache usage - func = pmb.parse.apkindex.parse - assert func(path, True) == "cached_result_multiple" - assert func(path, False) == "cached_result_single" - - # Make cache invalid - pmb.helpers.other.cache["apkindex"][path]["lastmod"] -= 10 - assert func(path, True) == {} - - # Delete the cache (run twice for both code paths) - assert pmb.parse.apkindex.clear_cache(path) is True - assert pmb.helpers.other.cache["apkindex"] == {} - assert pmb.parse.apkindex.clear_cache(path) is False - - -def test_parse(): - path = pmb.config.pmb_src + "/test/testdata/apkindex/no_error" - block_musl = {'arch': 'x86_64', - 'depends': [], - 'origin': 'musl', - 'pkgname': 'musl', - 'provides': ['so:libc.musl-x86_64.so.1'], - 'timestamp': '1515217616', - 'version': '1.1.18-r5'} - block_curl = {'arch': 'x86_64', - 'depends': ['ca-certificates', - 'so:libc.musl-x86_64.so.1', - 'so:libcurl.so.4', - 'so:libz.so.1'], - 'origin': 'curl', - 'pkgname': 'curl', - 'provides': ['cmd:curl'], - 'timestamp': '1512030418', - 'version': '7.57.0-r0'} - - # Test without multiple_providers - ret_single = {'cmd:curl': block_curl, - 'curl': block_curl, - 'musl': block_musl, - 'so:libc.musl-x86_64.so.1': block_musl} - assert pmb.parse.apkindex.parse(path, False) == ret_single - assert pmb.helpers.other.cache["apkindex"][path]["single"] == ret_single - - # Test with multiple_providers - ret_multiple = {'cmd:curl': {"curl": block_curl}, - 'curl': {"curl": block_curl}, - 'musl': {"musl": block_musl}, - 'so:libc.musl-x86_64.so.1': {"musl": block_musl}} - assert pmb.parse.apkindex.parse(path, True) == ret_multiple - assert ( - pmb.helpers.other.cache["apkindex"][path]["multiple"] == ret_multiple - ) - - -def test_parse_virtual(): - """ - This APKINDEX contains a virtual package .pbmootstrap. It must not be part - of the output. 
- """ - path = pmb.config.pmb_src + "/test/testdata/apkindex/virtual_package" - block = {'arch': 'x86_64', - 'depends': ['so:libc.musl-x86_64.so.1'], - 'origin': 'hello-world', - 'pkgname': 'hello-world', - 'provides': ['cmd:hello-world'], - 'timestamp': '1500000000', - 'version': '2-r0'} - ret = {"hello-world": block, "cmd:hello-world": block} - assert pmb.parse.apkindex.parse(path, False) == ret - assert pmb.helpers.other.cache["apkindex"][path]["single"] == ret - - -def test_providers_invalid_package(args, tmpdir): - # Create empty APKINDEX - path = str(tmpdir) + "/APKINDEX" - pmb.helpers.run.user(args, ["touch", path]) - - # Test with must_exist=False - func = pmb.parse.apkindex.providers - package = "test" - indexes = [path] - assert func(args, package, None, False, indexes) == {} - - # Test with must_exist=True - with pytest.raises(RuntimeError) as e: - func(args, package, None, True, indexes) - assert str(e.value).startswith("Could not find package") - - -def test_providers_highest_version(args, monkeypatch): - """ - In this test, we simulate 3 APKINDEX files ("i0", "i1", "i2" instead of - full paths to real APKINDEX.tar.gz files), and each of them has a different - version of the same package. The highest version must win, no matter in - which order the APKINDEX files are processed. - """ - # Fake parse function - def return_fake_parse(path): - version_mapping = {"i0": "2", "i1": "3", "i2": "1"} - package_block = {"pkgname": "test", "version": version_mapping[path]} - return {"test": {"test": package_block}} - monkeypatch.setattr(pmb.parse.apkindex, "parse", return_fake_parse) - - # Verify that it picks the highest version - func = pmb.parse.apkindex.providers - providers = func(args, "test", indexes=["i0", "i1", "i2"]) - assert providers["test"]["version"] == "3" - - -def test_provider_highest_priority(args, monkeypatch): - # Verify that it picks the provider with highest priority - func = pmb.parse.apkindex.provider_highest_priority - - provider_none_a = {"pkgname": "a", "provides": ["test"]} - provider_none_b = {"pkgname": "b", "provides": ["test"]} - provider_low_c = {"pkgname": "c", "provides": ["test"], - "provider_priority": 42} - provider_low_d = {"pkgname": "d", "provides": ["test"], - "provider_priority": 42} - provider_high = {"pkgname": "e", "provides": ["test"], - "provider_priority": 1337} - - # No provider has a priority - providers = {"a": provider_none_a} - assert func(providers, "test") == providers - providers = {"a": provider_none_a, "b": provider_none_b} - assert func(providers, "test") == providers - - # One provider has a priority, another one does not - providers = {"a": provider_none_a, "e": provider_high} - assert func(providers, "test") == {"e": provider_high} - - # One provider has a priority, another one has a higher priority - providers = {"c": provider_low_c, "e": provider_high} - assert func(providers, "test") == {"e": provider_high} - - # One provider has a priority, another one has the same priority - providers = {"c": provider_low_c, "d": provider_low_d} - assert func(providers, "test") == providers - - # + some package without priority at all should be filtered out - providers2 = providers.copy() - providers2["a"] = provider_none_a - assert func(providers2, "test") == providers - - -def test_package(args, monkeypatch): - # Override pmb.parse.apkindex.providers() - providers = collections.OrderedDict() - - def return_providers(*args, **kwargs): - return providers - monkeypatch.setattr(pmb.parse.apkindex, "providers", return_providers) - - # 
Provider with the same pkgname - func = pmb.parse.apkindex.package - pkgname = "test" - providers = {"test2": {"pkgname": "test2"}, "test": {"pkgname": "test"}} - assert func(args, pkgname) == {"pkgname": "test"} - - # First provider - providers = {"test2": {"pkgname": "test2"}, "test3": {"pkgname": "test3"}} - assert func(args, pkgname) == {"pkgname": "test2"} - - # No provider (with must_exist) - providers = {} - with pytest.raises(RuntimeError) as e: - func(args, pkgname) - assert "not found in any APKINDEX" in str(e.value) - - # No provider (without must_exist) - assert func(args, pkgname, must_exist=False) is None diff --git a/test/test_parse_depends.py b/test/test_parse_depends.py deleted file mode 100644 index e13f7e64..00000000 --- a/test/test_parse_depends.py +++ /dev/null @@ -1,167 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb.parse.depends """ -import collections -import pytest -import sys - -import pmb_test # noqa -import pmb.config -import pmb.config.init -import pmb.helpers.logging -import pmb.parse.depends - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_package_from_aports(args): - func = pmb.parse.depends.package_from_aports - assert func(args, "invalid-package") is None - assert func(args, "hello-world") == {"pkgname": "hello-world", - "depends": [], - "version": "1-r6"} - - -def test_package_provider(args, monkeypatch): - # Override pmb.parse.apkindex.providers() - providers = collections.OrderedDict() - - def return_providers(*args, **kwargs): - return providers - monkeypatch.setattr(pmb.parse.apkindex, "providers", return_providers) - - # Override pmb.chroot.apk.installed() - installed = {} - - def return_installed(*args, **kwards): - return installed - monkeypatch.setattr(pmb.chroot.apk, "installed", return_installed) - - # 0. No provider - pkgname = "test" - pkgnames_install = [] - func = pmb.parse.depends.package_provider - assert func(args, pkgname, pkgnames_install) is None - - # 1. Only one provider - package = {"pkgname": "test", "version": "1234"} - providers = {"test": package} - assert func(args, pkgname, pkgnames_install) == package - - # 2. Provider with the same package name - package_two = {"pkgname": "test-two", "provides": ["test"]} - providers = {"test-two": package_two, "test": package} - assert func(args, pkgname, pkgnames_install) == package - - # 3. Pick a package that will be installed anyway - providers = {"test_": package, "test-two": package_two} - installed = {"test_": package} - pkgnames_install = ["test-two"] - assert func(args, pkgname, pkgnames_install) == package_two - - # 4. Pick a package that is already installed - pkgnames_install = [] - assert func(args, pkgname, pkgnames_install) == package - - # 5. Pick package with highest priority - package_with_priority = {"pkgname": "test-priority", "provides": ["test"], - "provider_priority": 100} - providers = {"test-two": package_two, - "test-priority": package_with_priority} - assert func(args, pkgname, pkgnames_install) == package_with_priority - - # 6. 
Pick the first one - providers = {"test_": package, "test-two": package_two} - installed = {} - assert func(args, pkgname, pkgnames_install) == package - - -def test_package_from_index(args, monkeypatch): - # Override pmb.parse.depends.package_provider() - provider = None - - def return_provider(*args, **kwargs): - return provider - monkeypatch.setattr(pmb.parse.depends, "package_provider", - return_provider) - - func = pmb.parse.depends.package_from_index - aport = {"pkgname": "test", "version": "2"} - pkgname = "test" - pkgnames_install = [] - - # No binary package providers - assert func(args, pkgname, pkgnames_install, aport) is aport - - # Binary package outdated - provider = {"pkgname": "test", "version": "1"} - assert func(args, pkgname, pkgnames_install, aport) is aport - - # Binary package up-to-date - for version in ["2", "3"]: - provider = {"pkgname": "test", "version": version} - assert func(args, pkgname, pkgnames_install, aport) is provider - - -def test_recurse_invalid(args, monkeypatch): - func = pmb.parse.depends.recurse - - # Invalid package - with pytest.raises(RuntimeError) as e: - func(args, ["invalid-pkgname"]) - assert str(e.value).startswith("Could not find dependency") - - -def return_none(*args, **kwargs): - return None - - -def test_recurse(args, monkeypatch): - """ - Test recursing through the following dependencies: - - test: - libtest - so:libtest.so.1 - libtest: - libtest_depend - libtest_depend: - so:libtest.so.1: - libtest_depend - """ - # Override finding the package in aports: always no result - monkeypatch.setattr(pmb.parse.depends, "package_from_aports", - return_none) - - # Override depends returned from APKINDEX - depends = { - "test": ["libtest", "so:libtest.so.1"], - "libtest": ["libtest_depend"], - "libtest_depend": ["!libtest_conflict", "!libtest_conflict_missing"], - "libtest_conflict": [], - "so:libtest.so.1": ["libtest_depend"], - } - - def package_from_index(args, pkgname, install, aport, suffix): - if pkgname in depends: - return {"pkgname": pkgname, "depends": depends[pkgname]} - else: - return None - monkeypatch.setattr(pmb.parse.depends, "package_from_index", - package_from_index) - - # Run - func = pmb.parse.depends.recurse - pkgnames = ["test", "so:libtest.so.1"] - result = ["test", "so:libtest.so.1", "libtest", "libtest_depend", - "!libtest_conflict"] - assert func(args, pkgnames) == result diff --git a/test/test_parse_deviceinfo.py b/test/test_parse_deviceinfo.py deleted file mode 100644 index f1c3cdc2..00000000 --- a/test/test_parse_deviceinfo.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pytest -import sys - -import pmb_test.const -import pmb.parse - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_kernel_suffix(args): - args.aports = pmb_test.const.testdata + "/deviceinfo/aports" - device = "multiple-kernels" - - kernel = "mainline" - deviceinfo = pmb.parse.deviceinfo(args, device, kernel) - assert deviceinfo["append_dtb"] == "yes" - assert deviceinfo["dtb"] == "mainline-dtb" - - kernel = "mainline-modem" - deviceinfo = pmb.parse.deviceinfo(args, device, kernel) - assert deviceinfo["append_dtb"] == "yes" - assert deviceinfo["dtb"] == "mainline-modem-dtb" - - kernel = "downstream" - deviceinfo = 
pmb.parse.deviceinfo(args, device, kernel) - assert deviceinfo["append_dtb"] == "yes" - assert deviceinfo["dtb"] == "downstream-dtb" diff --git a/test/test_parse_kconfig.py b/test/test_parse_kconfig.py deleted file mode 100644 index 8c6d311c..00000000 --- a/test/test_parse_kconfig.py +++ /dev/null @@ -1,465 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb/parse/kconfig.py """ -import pytest -import sys -import os - -import pmb_test # noqa -import pmb.parse.kconfig - -test_options_checked_count = None - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "kconfig", "check"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def patch_config(monkeypatch): - """ - Delete the real kconfig_options_* variables in pmb/config/__init__.py and - replace them with a very basic config for the tests. The idea is that it - should use all features of the kconfig check code, so we can test all the - code paths. - """ - for key in list(pmb.config.__dict__.keys()): - if key.startswith("kconfig_options"): - monkeypatch.delattr(pmb.config, key) - - monkeypatch.setattr(pmb.config, "kconfig_options", { - ">=0.0.0": { # all versions - "all": { # all arches - "ANDROID_PARANOID_NETWORK": False, - "BLK_DEV_INITRD": True, - "DEFAULT_HOSTNAME": "(none)", - }, - }, - ">=2.6.0": { - "all": { - "BINFMT_ELF": True, - }, - }, - "<4.7.0": { - "all": { - "DEVPTS_MULTIPLE_INSTANCES": True, - }, - }, - "<5.2.0": { - "armhf armv7 x86": { - "LBDAF": True - }, - }, - }, False) - - monkeypatch.setattr(pmb.config, "kconfig_options_waydroid", { - ">=0.0.0": { - "all": { - "SQUASHFS": True, - "ANDROID_BINDERFS": False, - "ANDROID_BINDER_DEVICES": ["binder", "hwbinder", "vndbinder"], - } - }, - }, False) - - monkeypatch.setattr(pmb.config, "kconfig_options_nftables", { - ">=3.13.0 <5.17": { - "all": { - "NFT_COUNTER": True, - }, - }, - }, False) - - -def test_get_all_component_names(monkeypatch): - patch_config(monkeypatch) - func = pmb.parse.kconfig.get_all_component_names - assert func() == ["waydroid", "nftables"] - - -def test_is_set(): - config = ("CONFIG_WIREGUARD=m\n" - "# CONFIG_EXT2_FS is not set\n" - "CONFIG_EXT4_FS=y\n") - func = pmb.parse.kconfig.is_set - assert func(config, "WIREGUARD") is True - assert func(config, "EXT4_FS") is True - assert func(config, "NON_EXISTING") is False - - -def test_is_set_str(): - config = 'CONFIG_DEFAULT_HOSTNAME="(none)"\n' - func = pmb.parse.kconfig.is_set_str - option = "DEFAULT_HOSTNAME" - assert func(config, option, "(none)") is True - assert func(config, option, "hello") is False - assert func(config, f"{option}_2", "(none)") is False - - -def test_is_in_array(): - config = 'CONFIG_ANDROID_BINDER_DEVICES="binder,hwbinder,vndbinder"\n' - func = pmb.parse.kconfig.is_in_array - option = "ANDROID_BINDER_DEVICES" - assert func(config, option, "binder") is True - assert func(config, option, "hwbinder") is True - assert func(config, option, "vndbinder") is True - assert func(config, option, "invalidbinder") is False - assert func(config, f"{option}_2", "binder") is False - - -def test_check_option(): - func = pmb.parse.kconfig.check_option - config = ('CONFIG_BOOL=m\n' - 'CONFIG_LIST="a,b,c"\n' - 'CONFIG_STR="test"\n') - path = "/home/user/myconfig.aarch64" - - assert func("test", False, config, path, "BOOL", True) is True - assert func("test", True, config, path, 
"BOOL", True) is True - assert func("test", True, config, path, "NON_EXISTING", True) is False - assert func("test", True, config, path, "STR", "test") is True - assert func("test", True, config, path, "STR", "test2") is False - assert func("test", True, config, path, "LIST", ["a"]) is True - assert func("test", True, config, path, "LIST", ["d"]) is False - - with pytest.raises(RuntimeError) as e: - func("test", True, config, path, "TEST", {"dict": "notsupported"}) - assert "is not supported" in str(e.value) - - with pytest.raises(RuntimeError) as e: - func("test", True, config, path, "TEST", None) - assert "is not supported" in str(e.value) - - -def test_check_config_options_set(): - func = pmb.parse.kconfig.check_config_options_set - config = ('CONFIG_BOOL=m\n' - 'CONFIG_LIST="a,b,c"\n' - 'CONFIG_STR="test"\n') - path = "/home/user/myconfig.aarch64" - arch = "aarch64" - pkgver = "6.0" - component = "testcomponent" - - # Skip check because version is too low - options = { - ">=6.0.1": { - "all": { - "BOOL": False - } - } - } - assert func(config, path, arch, options, component, pkgver) is True - - # Skip check because version is too high - options = { - "<6.0": { - "all": { - "BOOL": False - } - } - } - assert func(config, path, arch, options, component, pkgver) is True - - # Skip with two version that don't match - options = { - "<6.2 >=6.0.1": { - "all": { - "BOOL": False - } - } - } - assert func(config, path, arch, options, component, pkgver) is True - - # Version matches, arch does not match - options = { - ">=6.0": { - "armhf": { - "BOOL": False - } - } - } - assert func(config, path, arch, options, component, pkgver) is True - - # Version matches, arch matches (aarch64) - options = { - ">=6.0": { - "aarch64": { - "BOOL": False - } - } - } - assert func(config, path, arch, options, component, pkgver) is False - - # Version matches, arch matches (all) - options = { - ">=6.0": { - "all": { - "BOOL": False - } - } - } - assert func(config, path, arch, options, component, pkgver) is False - - # Version matches, arch matches (all), rule passes - options = { - ">=6.0": { - "all": { - "BOOL": True - } - } - } - assert func(config, path, arch, options, component, pkgver) is True - - -def test_check_config_options_set_details(monkeypatch): - global test_options_checked_count - - func = pmb.parse.kconfig.check_config_options_set - config = ('CONFIG_BOOL=m\n' - 'CONFIG_LIST="a,b,c"\n' - 'CONFIG_STR="test"\n') - path = "/home/user/myconfig.aarch64" - arch = "aarch64" - pkgver = "6.0" - component = "testcomponent" - - def check_option_fake(*args, **kwargs): - global test_options_checked_count - test_options_checked_count += 1 - return False - - monkeypatch.setattr(pmb.parse.kconfig, "check_option", check_option_fake) - - options = { - ">=0.0.0": { - "all": { - "BOOL": False, - "STR": False, - } - } - } - - # No details: stop after first error - details = False - test_options_checked_count = 0 - assert func(config, path, arch, options, component, pkgver, details) is False - assert test_options_checked_count == 1 - - # Details: don't stop, do both checks - details = True - test_options_checked_count = 0 - assert func(config, path, arch, options, component, pkgver, details) is False - assert test_options_checked_count == 2 - - -def test_check_config(monkeypatch, tmpdir): - # Write test kernel config - tmpdir = str(tmpdir) - path = f"{tmpdir}/myconfig.aarch64" - with open(path, "w") as handle: - handle.write('CONFIG_BOOL=m\n' - 'CONFIG_LIST="a,b,c"\n' - 'CONFIG_STR="test"\n') - - 
patch_config(monkeypatch) - - func = pmb.parse.kconfig.check_config - arch = "aarch64" - pkgver = "6.0" - - # Invalid component - components_list = ["invalid-component-name"] - with pytest.raises(AssertionError) as e: - func(path, arch, pkgver, components_list) - assert "invalid kconfig component name" in str(e.value) - - # Fail base check - components_list = [] - assert func(path, arch, pkgver, components_list) is False - - # Fails base check, even with enforce=False - details = False - enforce = False - assert func(path, arch, pkgver, components_list, details, enforce) is False - - # Pass base check - with open(path, "w") as handle: - handle.write('CONFIG_BLK_DEV_INITRD=y\n' - 'CONFIG_DEFAULT_HOSTNAME="(none)"\n' - 'CONFIG_BINFMT_ELF=y\n') - components_list = [] - assert func(path, arch, pkgver, components_list) is True - - # Fail additional check - components_list = ["waydroid"] - assert func(path, arch, pkgver, components_list) is False - - # Fail additional check, but result is still True with enforce=False - components_list = ["waydroid"] - details = True - enforce = False - assert func(path, arch, pkgver, components_list, details, enforce) is True - - -def test_check(args, monkeypatch, tmpdir): - func = pmb.parse.kconfig.check - details = True - components_list = [] - patch_config(monkeypatch) - - # Create fake pmaports kernel structure - tmpdir = str(tmpdir) - monkeypatch.setattr(args, "aports", tmpdir) - path_aport = f"{tmpdir}/device/community/linux-nokia-n900" - path_apkbuild = f"{path_aport}/APKBUILD" - os.makedirs(path_aport) - - # APKBUILD - with open(path_apkbuild, "w") as handle: - handle.write('pkgname=linux-nokia-n900\n' - 'pkgver=5.15\n' - 'options="pmb:kconfigcheck-nftables"\n') - - # Non-existing #1 - must_exist = True - pkgname = "linux-does-not-exist" - with pytest.raises(RuntimeError) as e: - func(args, pkgname, components_list, details, must_exist) - assert "Could not find aport" in str(e.value) - - # Non-existing #2 - must_exist = False - pkgname = "linux-does-not-exist" - assert func(args, pkgname, components_list, details, must_exist) is None - - # Invalid kernel config name - path_kconfig = f"{path_aport}/config-nokia-n900_armv7" - with open(path_kconfig, "w") as handle: - handle.write('CONFIG_BOOL=m\n') - must_exist = True - pkgname = "linux-nokia-n900" - with pytest.raises(RuntimeError) as e: - func(args, pkgname, components_list, details, must_exist) - assert "is not a valid kernel config" in str(e.value) - os.unlink(path_kconfig) - - # Pass checks of base and nftables - path_kconfig = f"{path_aport}/config-nokia-n900.armv7" - with open(path_kconfig, "w") as handle: - handle.write('CONFIG_BLK_DEV_INITRD=y\n' - 'CONFIG_DEFAULT_HOSTNAME="(none)"\n' - 'CONFIG_BINFMT_ELF=y\n' - 'CONFIG_NFT_COUNTER=y\n') - must_exist = True - pkgname = "nokia-n900" - assert func(args, pkgname, components_list, details, must_exist) is True - - # Don't pass nftables check - with open(path_kconfig, "w") as handle: - handle.write('CONFIG_BLK_DEV_INITRD=y\n' - 'CONFIG_DEFAULT_HOSTNAME="(none)"\n' - 'CONFIG_BINFMT_ELF=y\n') - assert func(args, pkgname, components_list, details, must_exist) is False - - # Don't pass waydroid check (extra component check passed via cmdline) - with open(path_kconfig, "w") as handle: - handle.write('CONFIG_BLK_DEV_INITRD=y\n' - 'CONFIG_DEFAULT_HOSTNAME="(none)"\n' - 'CONFIG_BINFMT_ELF=y\n' - 'CONFIG_NFT_COUNTER=y\n') - components_list = ["waydroid"] - assert func(args, pkgname, components_list, details, must_exist) is False - - -def 
test_extract_arch(tmpdir): - func = pmb.parse.kconfig.extract_arch - path = f"{tmpdir}/config" - - with open(path, "w") as handle: - handle.write('CONFIG_ARM=y\n') - assert func(path) == "armv7" - - with open(path, "w") as handle: - handle.write('CONFIG_ARM64=y\n') - assert func(path) == "aarch64" - - with open(path, "w") as handle: - handle.write('CONFIG_RISCV=y\n') - assert func(path) == "riscv64" - - with open(path, "w") as handle: - handle.write('CONFIG_X86_32=y\n') - assert func(path) == "x86" - - with open(path, "w") as handle: - handle.write('CONFIG_X86_64=y\n') - assert func(path) == "x86_64" - - with open(path, "w") as handle: - handle.write('hello') - assert func(path) == "unknown" - - -def test_extract_version(tmpdir): - func = pmb.parse.kconfig.extract_version - path = f"{tmpdir}/config" - - with open(path, "w") as handle: - handle.write("#\n" - "# Automatically generated file; DO NOT EDIT.\n" - "# Linux/arm64 3.10.93 Kernel Configuration\n") - assert func(path) == "3.10.93" - - with open(path, "w") as handle: - handle.write("#\n" - "# Automatically generated file; DO NOT EDIT.\n" - "# Linux/arm64 6.2.0 Kernel Configuration\n") - assert func(path) == "6.2.0" - - with open(path, "w") as handle: - handle.write("#\n" - "# Automatically generated file; DO NOT EDIT.\n" - "# Linux/riscv 6.1.0-rc3 Kernel Configuration\n") - assert func(path) == "6.1.0_rc3" - - with open(path, "w") as handle: - handle.write("#\n" - "# Automatically generated file; DO NOT EDIT.\n" - "# no version here\n") - assert func(path) == "unknown" - - -def test_check_file(tmpdir, monkeypatch): - patch_config(monkeypatch) - func = pmb.parse.kconfig.check_file - path = f"{tmpdir}/config" - - # Fail the basic check - with open(path, "w") as handle: - handle.write("#\n" - "# Automatically generated file; DO NOT EDIT.\n" - "# Linux/arm64 3.10.93 Kernel Configuration\n" - "CONFIG_ARM64=y\n") - - func(path) is False - - # Pass the basic check - with open(path, "w") as handle: - handle.write("#\n" - "# Automatically generated file; DO NOT EDIT.\n" - "# Linux/arm64 3.10.93 Kernel Configuration\n" - "CONFIG_ARM64=y\n" - "BLK_DEV_INITRD=y\n" - "DEFAULT_HOSTNAME=\"(none)\"\n" - "BINFMT_ELF=y\n" - "DEVPTS_MULTIPLE_INSTANCES=y\n" - "LBDAF=y\n") - - func(path) is True diff --git a/test/test_pkgrel_bump.py b/test/test_pkgrel_bump.py deleted file mode 100644 index b6350a84..00000000 --- a/test/test_pkgrel_bump.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb.helper.pkgrel_bump """ -import glob -import os -import pytest -import sys - -import pmb_test # noqa -import pmb_test.git -import pmb.helpers.pkgrel_bump -import pmb.helpers.logging - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def pmbootstrap(args, tmpdir, parameters, zero_exit=True): - """ - Helper function for running pmbootstrap inside the fake work folder - (created by setup() below) with the binary repo disabled and with the - testdata configured as aports. - - :param parameters: what to pass to pmbootstrap, e.g. 
["build", "testlib"] - :param zero_exit: expect pmbootstrap to exit with 0 (no error) - """ - # Run pmbootstrap - aports = tmpdir + "/_aports" - config = tmpdir + "/_pmbootstrap.cfg" - - # Copy .git dir to fake pmaports - dot_git = tmpdir + "/_aports/.git" - if not os.path.exists(dot_git): - pmb_test.git.copy_dotgit(args, aports) - - try: - pmb.helpers.run.user(args, ["./pmbootstrap.py", "--work=" + tmpdir, - "--mirror-pmOS=", "--aports=" + aports, - "--config=" + config] + parameters, - working_dir=pmb.config.pmb_src) - - # Verify that it exits as desired - except Exception as exc: - if zero_exit: - raise RuntimeError("pmbootstrap failed") from exc - else: - return - if not zero_exit: - raise RuntimeError("Expected pmbootstrap to fail, but it did not!") - - -def setup_work(args, tmpdir): - """ - Create fake work folder in tmpdir with everything symlinked except for the - built packages. The aports testdata gets copied to the tempfolder as - well, so it can be modified during testing. - """ - # Clean the chroots, and initialize the build chroot in the native chroot. - # We do this before creating the fake work folder, because then all - # packages are still present. - os.chdir(pmb.config.pmb_src) - pmb.helpers.run.user(args, ["./pmbootstrap.py", "-y", "zap"]) - pmb.helpers.run.user(args, ["./pmbootstrap.py", "build_init"]) - pmb.helpers.run.user(args, ["./pmbootstrap.py", "shutdown"]) - - # Link everything from work (except for "packages") to the tmpdir - for path in glob.glob(args.work + "/*"): - if os.path.basename(path) != "packages": - pmb.helpers.run.user(args, ["ln", "-s", path, tmpdir + "/"]) - - # Copy testdata and selected device aport - for folder in ["device/testing", "main"]: - pmb.helpers.run.user(args, ["mkdir", "-p", args.aports, tmpdir + - "/_aports/" + folder]) - path_original = pmb.helpers.pmaports.find(args, f"device-{args.device}") - pmb.helpers.run.user(args, ["cp", "-r", path_original, - f"{tmpdir}/_aports/device/testing"]) - for pkgname in ["testlib", "testapp", "testsubpkg"]: - pmb.helpers.run.user(args, ["cp", "-r", - "test/testdata/pkgrel_bump/aports/" - f"{pkgname}", - f"{tmpdir}/_aports/main/{pkgname}"]) - - # Copy pmaports.cfg - pmb.helpers.run.user(args, ["cp", args.aports + "/pmaports.cfg", tmpdir + - "/_aports"]) - - # Empty packages folder - channel = pmb.config.pmaports.read_config(args)["channel"] - packages_path = f"{tmpdir}/packages/{channel}" - pmb.helpers.run.user(args, ["mkdir", "-p", packages_path]) - pmb.helpers.run.user(args, ["chmod", "777", packages_path]) - - # Copy over the pmbootstrap config - pmb.helpers.run.user(args, ["cp", args.config, tmpdir + - "/_pmbootstrap.cfg"]) - - -def verify_pkgrels(tmpdir, pkgrel_testlib, pkgrel_testapp, - pkgrel_testsubpkg): - """ - Verify the pkgrels of the three test APKBUILDs ("testlib", "testapp", - "testsubpkg"). 
- """ - pmb.helpers.other.cache["apkbuild"] = {} - mapping = {"testlib": pkgrel_testlib, - "testapp": pkgrel_testapp, - "testsubpkg": pkgrel_testsubpkg} - for pkgname, pkgrel in mapping.items(): - # APKBUILD path - path = tmpdir + "/_aports/main/" + pkgname + "/APKBUILD" - - # Parse and verify - apkbuild = pmb.parse.apkbuild(path) - assert pkgrel == int(apkbuild["pkgrel"]) - - -def test_pkgrel_bump_high_level(args, tmpdir): - # Tempdir setup - tmpdir = str(tmpdir) - setup_work(args, tmpdir) - - # Make sure we don't try and cross compile - pmbootstrap(args, tmpdir, ["config", "build_default_device_arch", "False"]) - - # Let pkgrel_bump exit normally - pmbootstrap(args, tmpdir, ["build", "testlib", "testapp", "testsubpkg"]) - pmbootstrap(args, tmpdir, ["pkgrel_bump", "--dry", "--auto"]) - verify_pkgrels(tmpdir, 0, 0, 0) - - # Increase soname (testlib soname changes with the pkgrel) - pmbootstrap(args, tmpdir, ["pkgrel_bump", "testlib"]) - verify_pkgrels(tmpdir, 1, 0, 0) - pmbootstrap(args, tmpdir, ["build", "testlib"]) - pmbootstrap(args, tmpdir, ["pkgrel_bump", "--dry", "--auto"]) - verify_pkgrels(tmpdir, 1, 0, 0) - - # Delete package with previous soname (--auto-dry exits with >0 now) - channel = pmb.config.pmaports.read_config(args)["channel"] - arch = pmb.config.arch_native - apk_path = f"{tmpdir}/packages/{channel}/{arch}/testlib-1.0-r0.apk" - pmb.helpers.run.root(args, ["rm", apk_path]) - pmbootstrap(args, tmpdir, ["index"]) - pmbootstrap(args, tmpdir, ["pkgrel_bump", "--dry", "--auto"], False) - verify_pkgrels(tmpdir, 1, 0, 0) - - # Bump pkgrel and build testapp/testsubpkg - pmbootstrap(args, tmpdir, ["pkgrel_bump", "--auto"]) - verify_pkgrels(tmpdir, 1, 1, 1) - pmbootstrap(args, tmpdir, ["build", "testapp", "testsubpkg"]) - - # After rebuilding, pkgrel_bump --auto-dry exits with 0 - pmbootstrap(args, tmpdir, ["pkgrel_bump", "--dry", "--auto"]) - verify_pkgrels(tmpdir, 1, 1, 1) - - # Test running with specific package names - pmbootstrap(args, tmpdir, ["pkgrel_bump", "invalid_package_name"], False) - pmbootstrap(args, tmpdir, ["pkgrel_bump", "--dry", "testlib"], False) - verify_pkgrels(tmpdir, 1, 1, 1) - - # Clean up - pmbootstrap(args, tmpdir, ["shutdown"]) - pmb.helpers.run.root(args, ["rm", "-rf", tmpdir]) diff --git a/test/test_qemu_running_processes.py b/test/test_qemu_running_processes.py deleted file mode 100644 index 6416508c..00000000 --- a/test/test_qemu_running_processes.py +++ /dev/null @@ -1,191 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" -This file runs various installations and boots into them with QEMU, then checks -via SSH if expected processes are running. - -We use an extra config file (based on ~/.config/pmbootstrap.cfg), because we -need to change it a lot (e.g. UI, username, ...). 
-""" -import pytest -import sys -import shutil -import shlex -import time - -import pmb_test # noqa -import pmb.chroot.apk_static -import pmb.parse.apkindex -import pmb.helpers.logging -import pmb.helpers.run -import pmb.parse.bootimg - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def ssh_create_askpass_script(args): - """Create /tmp/y.sh, which we need to automatically login via SSH.""" - with open(args.work + "/chroot_native/tmp/y.sh", "w") as handle: - handle.write("#!/bin/sh\necho y\n") - pmb.chroot.root(args, ["chmod", "+x", "/tmp/y.sh"]) - - -def pmbootstrap_run(args, config, parameters, output="log"): - """Execute pmbootstrap.py with a test pmbootstrap.conf.""" - return pmb.helpers.run.user(args, ["./pmbootstrap.py", "-c", config] + - parameters, working_dir=pmb.config.pmb_src, - output=output) - - -def pmbootstrap_yes(args, config, parameters): - """ - Execute pmbootstrap.py with a test pmbootstrap.conf, and pipe "yes" into it - (so we can do a fully automated installation, using "y" as password - everywhere). Use --details-to-stdout to avoid the pmbootstrap process from - looking like it is hanging, when downloading packages with apk (otherwise - it would write no output, and get killed by the timeout). - """ - command = ("yes | ./pmbootstrap.py --details-to-stdout -c " + - shlex.quote(config)) - for parameter in parameters: - command += " " + shlex.quote(parameter) - return pmb.helpers.run.user(args, ["/bin/sh", "-c", command], - working_dir=pmb.config.pmb_src) - - -class QEMU(object): - def __init__(self, request): - self.process = None - request.addfinalizer(self.terminate) - - def terminate(self): - if self.process: - self.process.terminate() - else: - print("WARNING: The QEMU process wasn't set, so it could not be" - " terminated.") - - def run(self, args, tmpdir, ui="none"): - # Copy and adjust user's pmbootstrap.cfg - config = str(tmpdir) + "/pmbootstrap.cfg" - shutil.copyfile(args.config, config) - pmbootstrap_run(args, config, ["config", "device", "qemu-amd64"]) - pmbootstrap_run(args, config, ["config", "kernel", "virt"]) - pmbootstrap_run(args, config, ["config", "extra_packages", "none"]) - pmbootstrap_run(args, config, ["config", "user", "testuser"]) - pmbootstrap_run(args, config, ["config", "ui", ui]) - - # Prepare native chroot - pmbootstrap_run(args, config, ["-y", "zap"]) - pmb.chroot.apk.install(args, ["openssh-client"]) - ssh_create_askpass_script(args) - - # Create and run rootfs - pmbootstrap_yes(args, config, ["install"]) - self.process = pmbootstrap_run(args, config, ["qemu", "--display", - "none"], "background") - - -@pytest.fixture -def qemu(request): - return QEMU(request) - - -def ssh_run(args, command): - """ - Run a command in the QEMU VM on localhost via SSH. - - :param command: flat string of the command to execute, e.g. "ps au" - :returns: the result from the SSH server - """ - ret = pmb.chroot.user(args, ["SSH_ASKPASS=/tmp/y.sh", "DISPLAY=", "ssh", - "-o", "ConnectTimeout=10", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "StrictHostKeyChecking=no", - "-p", "2222", "testuser@localhost", "--", - command], output_return=True, check=False) - return ret - - -def is_running(args, programs, timeout=300, sleep_before_retry=1): - """ - Simple check that looks for program names in the output of "ps ax". 
- This is error-prone, only use it with programs that have a unique name. - With defaults timeout and sleep_before_retry values, it will try keep - trying for 5 minutes, but not more than once per second. - - :param programs: list of programs to check for, e.g. ["xfce4-desktop"] - :param timeout: approximate time in seconds until timeout - :param sleep_before_retry: time in seconds to sleep before trying again - """ - print(f"Looking for programs to appear in the VM (timeout: {timeout}): " + - ", ".join(programs)) - ssh_works = False - - end = time.monotonic() + timeout - last_try = 0 - - while last_try < end: - # Sleep only when last try exited immediately - sleep = last_try - time.monotonic() + sleep_before_retry - if sleep > 0: - time.sleep(sleep) - last_try = time.monotonic() - - # Get running programs via SSH - all = ssh_run(args, "ps ax") - if not all: - continue - ssh_works = True - - # Missing programs - missing = [] - for program in programs: - if program not in all: - missing.append(program) - if not missing: - return True - - # Not found - print("ERROR: Timeout reached!") - if ssh_works: - print("Programs not running: " + ", ".join(missing)) - else: - print("Could not connect to the VM via SSH") - return False - - -@pytest.mark.skip_ci -def test_none(args, tmpdir, qemu): - qemu.run(args, tmpdir) - - # Check that at least SSH works (no special process running) - assert is_running(args, []) - - # self-test of is_running() - invalid-process should not be detected as - # running - assert is_running(args, ["invalid-process"], 1) is False - - -@pytest.mark.skip_ci -def test_xfce4(args, tmpdir, qemu): - qemu.run(args, tmpdir, "xfce4") - assert is_running(args, ["xfce4-session", "xfdesktop", "xfce4-panel", - "Thunar", "dbus-daemon", "xfwm4"]) - - -@pytest.mark.skip_ci -def test_plasma_mobile(args, tmpdir, qemu): - # NOTE: Once we have plasma mobile running properly without GL, we can - # check for more processes - qemu.run(args, tmpdir, "plasma-mobile") - assert is_running(args, ["polkitd"]) diff --git a/test/test_questions.py b/test/test_questions.py deleted file mode 100644 index 34e181b3..00000000 --- a/test/test_questions.py +++ /dev/null @@ -1,305 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import logging -import os -import pytest -import sys - -import pmb_test -import pmb_test.const -import pmb.aportgen.device -import pmb.config -import pmb.config.init -import pmb.helpers.logging -import pmb.parse.deviceinfo - - -@pytest.fixture -def args(tmpdir, request): - import pmb.parse - cfg = f"{pmb_test.const.testdata}/channels.cfg" - sys.argv = ["pmbootstrap.py", "--config-channels", cfg, "init"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def fake_answers(monkeypatch, answers): - """ - Patch pmb.helpers.cli.ask() function to return defined answers instead of - asking the user for an answer. - - :param answers: list of answer strings, e.g. ["y", "n", "invalid-device"]. - In this example, the first question is answered with "y", - the second question with "n" and so on. 
- """ - def fake_ask(question="Continue?", choices=["y", "n"], default="n", - lowercase_answer=True, validation_regex=None, complete=None): - answer = answers.pop(0) - logging.info("pmb.helpers.cli.ask() fake answer: " + answer) - return answer - monkeypatch.setattr(pmb.helpers.cli, "ask", fake_ask) - - -def test_fake_answers_selftest(monkeypatch): - fake_answers(monkeypatch, ["first", "second"]) - assert pmb.helpers.cli.ask() == "first" - assert pmb.helpers.cli.ask() == "second" - - -def test_questions_booleans(args, monkeypatch): - functions = [pmb.aportgen.device.ask_for_keyboard, - pmb.aportgen.device.ask_for_external_storage] - for func in functions: - fake_answers(monkeypatch, ["y", "n"]) - assert func(args) is True - assert func(args) is False - - -def test_questions_strings(args, monkeypatch): - functions = [pmb.aportgen.device.ask_for_manufacturer] - for func in functions: - fake_answers(monkeypatch, ["Simple string answer"]) - assert func() == "Simple string answer" - - -def test_questions_name(args, monkeypatch): - func = pmb.aportgen.device.ask_for_name - - # Manufacturer should get added automatically, but not twice - fake_answers(monkeypatch, ["Amazon Thor"]) - assert func("Amazon") == "Amazon Thor" - fake_answers(monkeypatch, ["Thor"]) - assert func("Amazon") == "Amazon Thor" - - # Don't add the manufacturer when it starts with "Google" - fake_answers(monkeypatch, ["Google Nexus 12345"]) - assert func("Amazon") == "Google Nexus 12345" - - -def test_questions_arch(args, monkeypatch): - fake_answers(monkeypatch, ["invalid_arch", "aarch64"]) - assert pmb.aportgen.device.ask_for_architecture() == "aarch64" - - -def test_questions_bootimg(args, monkeypatch): - func = pmb.aportgen.device.ask_for_bootimg - fake_answers(monkeypatch, ["invalid_path", ""]) - assert func(args) is None - - bootimg_path = pmb_test.const.testdata + "/bootimg/normal-boot.img" - fake_answers(monkeypatch, [bootimg_path]) - output = {"header_version": "0", - "base": "0x80000000", - "kernel_offset": "0x00008000", - "ramdisk_offset": "0x04000000", - "second_offset": "0x00f00000", - "tags_offset": "0x0e000000", - "pagesize": "2048", - "cmdline": "bootopt=64S3,32S1,32S1", - "qcdt": "false", - "dtb_second": "false"} - assert func(args) == output - - -def test_questions_device(args, monkeypatch): - # Prepare args - args.aports = pmb_test.const.testdata + "/init_questions_device/aports" - args.device = "lg-mako" - args.nonfree_firmware = True - args.nonfree_userland = False - args.kernel = "downstream" - - # Do not generate aports - def fake_generate(args, pkgname): - return - monkeypatch.setattr(pmb.aportgen, "generate", fake_generate) - - # Existing device (without non-free components so we have defaults there) - func = pmb.config.init.ask_for_device - nonfree = {"firmware": True, "userland": False} - fake_answers(monkeypatch, ["lg", "mako"]) - kernel = args.kernel - assert func(args) == ("lg-mako", True, kernel, nonfree) - - # Non-existing vendor, go back, existing vendor+device - fake_answers(monkeypatch, ["whoops", "n", "lg", "mako"]) - assert func(args) == ("lg-mako", True, kernel, nonfree) - - # Existing vendor, new device, go back, existing vendor+device - fake_answers(monkeypatch, ["lg", "nonexistent", "n", "lg", "mako"]) - assert func(args) == ("lg-mako", True, kernel, nonfree) - - # New vendor and new device (new port) - fake_answers(monkeypatch, ["new", "y", "device", "y"]) - assert func(args) == ("new-device", False, kernel, nonfree) - - # Existing vendor, new device (new port) - 
fake_answers(monkeypatch, ["lg", "nonexistent", "y"]) - assert func(args) == ("lg-nonexistent", False, kernel, nonfree) - - -def test_questions_device_kernel(args, monkeypatch): - # Prepare args - args.aports = pmb_test.const.testdata + "/init_questions_device/aports" - args.kernel = "downstream" - - # Kernel hardcoded in depends - func = pmb.config.init.ask_for_device_kernel - device = "lg-mako" - assert func(args, device) == args.kernel - - # Choose "mainline" - device = "sony-amami" - fake_answers(monkeypatch, ["mainline"]) - assert func(args, device) == "mainline" - - # Choose "downstream" - fake_answers(monkeypatch, ["downstream"]) - assert func(args, device) == "downstream" - - -def test_questions_device_nonfree(args, monkeypatch): - # Prepare args - args.aports = pmb_test.const.testdata + "/init_questions_device/aports" - args.nonfree_firmware = False - args.nonfree_userland = False - - # APKBUILD with firmware and userland (all yes) - func = pmb.config.init.ask_for_device_nonfree - device = "nonfree-firmware-and-userland" - fake_answers(monkeypatch, ["y", "y"]) - nonfree = {"firmware": True, "userland": True} - assert func(args, device) == nonfree - - # APKBUILD with firmware and userland (all no) - fake_answers(monkeypatch, ["n", "n"]) - nonfree = {"firmware": False, "userland": False} - assert func(args, device) == nonfree - - # APKBUILD with firmware only - func = pmb.config.init.ask_for_device_nonfree - device = "nonfree-firmware" - fake_answers(monkeypatch, ["y"]) - nonfree = {"firmware": True, "userland": False} - assert func(args, device) == nonfree - - # APKBUILD with userland only - func = pmb.config.init.ask_for_device_nonfree - device = "nonfree-userland" - fake_answers(monkeypatch, ["y"]) - nonfree = {"firmware": False, "userland": True} - assert func(args, device) == nonfree - - -def test_questions_flash_methods(args, monkeypatch): - func = pmb.aportgen.device.ask_for_flash_method - fake_answers(monkeypatch, ["invalid_flash_method", "fastboot"]) - assert func() == "fastboot" - - fake_answers(monkeypatch, ["0xffff"]) - assert func() == "0xffff" - - fake_answers(monkeypatch, ["heimdall", "invalid_type", "isorec"]) - assert func() == "heimdall-isorec" - - fake_answers(monkeypatch, ["heimdall", "bootimg"]) - assert func() == "heimdall-bootimg" - - -def test_questions_keymaps(args, monkeypatch): - func = pmb.config.init.ask_for_keymaps - fake_answers(monkeypatch, ["invalid_keymap", "us/rx51_us"]) - assert func(args, pmb.parse.deviceinfo(args, "nokia-n900")) == "us/rx51_us" - assert func(args, pmb.parse.deviceinfo(args, "lg-mako")) == "" - - -def test_questions_ui(args, monkeypatch): - args.aports = pmb_test.const.testdata + "/init_questions_device/aports" - device = "lg-mako" - info = pmb.parse.deviceinfo(args, device) - - fake_answers(monkeypatch, ["none"]) - assert pmb.config.init.ask_for_ui(args, info) == "none" - - fake_answers(monkeypatch, ["invalid_UI", "weston"]) - assert pmb.config.init.ask_for_ui(args, info) == "weston" - - -def test_questions_ui_extras(args, monkeypatch): - args.aports = pmb_test.const.testdata + "/init_questions_device/aports" - assert not pmb.config.init.ask_for_ui_extras(args, "none") - - fake_answers(monkeypatch, ["n"]) - assert not pmb.config.init.ask_for_ui_extras(args, "weston") - - fake_answers(monkeypatch, ["y"]) - assert pmb.config.init.ask_for_ui_extras(args, "weston") - - -def test_questions_work_path(args, monkeypatch, tmpdir): - # Existing paths (triggering various errors) - func = pmb.config.init.ask_for_work_path - tmpdir = 
str(tmpdir) - fake_answers(monkeypatch, ["/dev/null", os.path.dirname(__file__), - pmb.config.pmb_src, tmpdir]) - assert func(args) == (tmpdir, True) - - # Non-existing path - work = tmpdir + "/non_existing_subfolder" - fake_answers(monkeypatch, [work]) - assert func(args) == (work, False) - - -def test_questions_additional_options(args, monkeypatch): - func = pmb.config.init.ask_for_additional_options - cfg = {"pmbootstrap": {}} - - # Skip changing anything - fake_answers(monkeypatch, ["n"]) - func(args, cfg) - assert cfg == {"pmbootstrap": {}} - - # Answer everything - fake_answers(monkeypatch, ["y", "128", "64", "5", "2G", "n", "y", "1", - "n"]) - func(args, cfg) - mirror = pmb.config.defaults["mirrors_postmarketos"] - assert cfg == {"pmbootstrap": {"extra_space": "128", - "boot_size": "64", - "jobs": "5", - "ccache_size": "2G", - "sudo_timer": "False", - "mirrors_postmarketos": mirror}} - - -def test_questions_hostname(args, monkeypatch): - func = pmb.config.init.ask_for_hostname - device = "test-device" - - # Valid hostname - fake_answers(monkeypatch, ["valid"]) - assert func(args, device) == "valid" - - # Hostname too long ("aaaaa...") - fake_answers(monkeypatch, ["a" * 64, "a" * 63]) - assert func(args, device) == "a" * 63 - - # Fail the regex - fake_answers(monkeypatch, ["$invalid", "valid"]) - assert func(args, device) == "valid" - - # Begins or ends with minus - fake_answers(monkeypatch, ["-invalid", "invalid-", "valid"]) - assert func(args, device) == "valid" - - # Device name: empty string - fake_answers(monkeypatch, [device]) - assert func(args, device) == "" - - -def test_questions_channel(args, monkeypatch): - fake_answers(monkeypatch, ["invalid-channel", "v20.05"]) - assert pmb.config.init.ask_for_channel(args) == "v20.05" diff --git a/test/test_run_core.py b/test/test_run_core.py deleted file mode 100644 index d120fc0f..00000000 --- a/test/test_run_core.py +++ /dev/null @@ -1,175 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -""" Test pmb.helpers.run_core """ -import os -import pytest -import re -import subprocess -import sys -import time - -import pmb_test # noqa -import pmb.helpers.run_core - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_sanity_checks(): - func = pmb.helpers.run_core.sanity_checks - - # Invalid output - with pytest.raises(RuntimeError) as e: - func("invalid-output") - assert str(e.value).startswith("Invalid output value") - - # Background and check - func("background", check=None) - for check in [True, False]: - with pytest.raises(RuntimeError) as e: - func("background", check=check) - assert str(e.value).startswith("Can't use check with") - - # output_return - func("log", output_return=True) - with pytest.raises(RuntimeError) as e: - func("tui", output_return=True) - assert str(e.value).startswith("Can't use output_return with") - - -def test_background(args): - # Sleep in background - process = pmb.helpers.run_core.background(["sleep", "1"], "/") - - # Check if it is still running - assert process.poll() is None - - -def test_pipe(args): - # Sleep in background - process = pmb.helpers.run_core.pipe(["sleep", "1"], "/") - - # Check if it is still running - assert process.poll() is None - - # Print output in background - process = pmb.helpers.run_core.pipe(["echo", "-n", 
"hello"], "/") - - # Read output - assert process.communicate()[0].decode('utf-8') == "hello" - - -def test_foreground_pipe(args): - func = pmb.helpers.run_core.foreground_pipe - cmd = ["echo", "test"] - - # Normal run - assert func(args, cmd) == (0, "") - - # Return output - assert func(args, cmd, output_return=True) == (0, "test\n") - - # Kill with output timeout - cmd = ["sh", "-c", "echo first; sleep 2; echo second"] - args.timeout = 0.3 - ret = func(args, cmd, output_return=True, output_timeout=True) - assert ret == (-9, "first\n") - - # Kill with output timeout as root - cmd = pmb.config.sudo(["sh", "-c", "printf first; sleep 2; printf second"]) - args.timeout = 0.3 - ret = func(args, cmd, output_return=True, output_timeout=True, - sudo=True) - assert ret == (-9, "first") - - # Finish before timeout - cmd = ["sh", "-c", "echo first; sleep 0.1; echo second; sleep 0.1;" - "echo third; sleep 0.1; echo fourth"] - args.timeout = 0.2 - ret = func(args, cmd, output_return=True, output_timeout=True) - assert ret == (0, "first\nsecond\nthird\nfourth\n") - - # Check if all child processes are killed after timeout. - # The first command uses ps to get its process group id (pgid) and echo it - # to stdout. All of the test commands will be running under that pgid. - cmd = pmb.config.sudo([ - "sh", "-c", - "pgid=$(ps -o pgid= | grep ^${1:-$$});echo $pgid | tr -d '\n';" - "sleep 10 | sleep 20 | sleep 30" - ]) - args.timeout = 0.3 - ret = func(args, cmd, output_return=True, output_timeout=True, - sudo=True) - pgid = str(ret[1]) - - cmd = ["ps", "-e", "-o", "pgid,comm"] - ret = subprocess.run(cmd, check=True, stdout=subprocess.PIPE) - procs = str(ret.stdout.decode("utf-8")).rstrip().split('\n')[1:] - child_procs = [] - for process in procs: - items = process.split(maxsplit=1) - if len(items) != 2: - continue - if pgid == items[0] and "sleep" in items[1]: - child_procs.append(items) - assert len(child_procs) == 0 - - -def test_foreground_tui(): - func = pmb.helpers.run_core.foreground_tui - assert func(["echo", "test"]) == 0 - - -def test_core(args, monkeypatch): - # Background - func = pmb.helpers.run_core.core - msg = "test" - process = func(args, msg, ["sleep", "1"], output="background") - assert process.poll() is None - - # Foreground (TUI) - ret = func(args, msg, ["echo", "test"], output="tui") - assert ret == 0 - - # Foreground (pipe) - ret = func(args, msg, ["echo", "test"], output="log") - assert ret == 0 - - # Return output - ret = func(args, msg, ["echo", "test"], output="log", output_return=True) - assert ret == "test\n" - - # Check the return code - with pytest.raises(RuntimeError) as e: - func(args, msg, ["false"], output="log") - assert re.search(r"^Command failed \(exit code -?\d*\): ", str(e.value)) - - # Kill with timeout - args.timeout = 0.2 - with pytest.raises(RuntimeError) as e: - func(args, msg, ["sleep", "1"], output="log") - assert re.search(r"^Command failed \(exit code -?\d*\): ", str(e.value)) - - # Preserve proxy environment variables - monkeypatch.setattr(os, "environ", {"FTP_PROXY": "testproxy"}) - ret = func(args, msg, ["sh", "-c", 'echo "$FTP_PROXY"'], output_return=True) - assert ret == "testproxy\n" - - -@pytest.mark.skip_ci -def test_sudo_timer(args): - pmb.helpers.run.root(args, ["whoami"]) - - time.sleep(300) - - out = pmb.helpers.run.root(args, ["whoami"]) - - assert out == 0 diff --git a/test/test_shell_escape.py b/test/test_shell_escape.py deleted file mode 100644 index d2c57ee9..00000000 --- a/test/test_shell_escape.py +++ /dev/null @@ -1,115 +0,0 @@ -# 
Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import sys -import pytest - -import pmb_test # noqa -import pmb.chroot.root -import pmb.chroot.user -import pmb.helpers.run -import pmb.helpers.run_core -import pmb.helpers.logging - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_shell_escape(args): - cmds = {"test\n": ["echo", "test"], - "test && test\n": ["echo", "test", "&&", "test"], - "test ; test\n": ["echo", "test", ";", "test"], - "'test\"test\\'\n": ["echo", "'test\"test\\'"], - "*\n": ["echo", "*"], - "$PWD\n": ["echo", "$PWD"], - "hello world\n": ["printf", "%s world\n", "hello"]} - for expected, cmd in cmds.items(): - copy = list(cmd) - core = pmb.helpers.run_core.core(args, str(cmd), cmd, - output_return=True) - assert expected == core - assert cmd == copy - - user = pmb.helpers.run.user(args, cmd, output_return=True) - assert expected == user - assert cmd == copy - - root = pmb.helpers.run.root(args, cmd, output_return=True) - assert expected == root - assert cmd == copy - - chroot_root = pmb.chroot.root(args, cmd, output_return=True) - assert expected == chroot_root - assert cmd == copy - - chroot_user = pmb.chroot.user(args, cmd, output_return=True) - assert expected == chroot_user - assert cmd == copy - - -def test_shell_escape_env(args): - key = "PMBOOTSTRAP_TEST_ENVIRONMENT_VARIABLE" - value = "long value with spaces and special characters: '\"\\!$test" - env = {key: value} - cmd = ["sh", "-c", "env | grep " + key + " | grep -v SUDO_COMMAND"] - ret = key + "=" + value + "\n" - - copy = list(cmd) - func = pmb.helpers.run.user - assert func(args, cmd, output_return=True, env=env) == ret - assert cmd == copy - - func = pmb.helpers.run.root - assert func(args, cmd, output_return=True, env=env) == ret - assert cmd == copy - - func = pmb.chroot.root - assert func(args, cmd, output_return=True, env=env) == ret - assert cmd == copy - - func = pmb.chroot.user - assert func(args, cmd, output_return=True, env=env) == ret - assert cmd == copy - - -def test_flat_cmd_simple(): - func = pmb.helpers.run_core.flat_cmd - cmd = ["echo", "test"] - working_dir = None - ret = "echo test" - env = {} - assert func(cmd, working_dir, env) == ret - - -def test_flat_cmd_wrap_shell_string_with_spaces(): - func = pmb.helpers.run_core.flat_cmd - cmd = ["echo", "string with spaces"] - working_dir = None - ret = "echo 'string with spaces'" - env = {} - assert func(cmd, working_dir, env) == ret - - -def test_flat_cmd_wrap_env_simple(): - func = pmb.helpers.run_core.flat_cmd - cmd = ["echo", "test"] - working_dir = None - ret = "JOBS=5 echo test" - env = {"JOBS": "5"} - assert func(cmd, working_dir, env) == ret - - -def test_flat_cmd_wrap_env_spaces(): - func = pmb.helpers.run_core.flat_cmd - cmd = ["echo", "test"] - working_dir = None - ret = "JOBS=5 TEST='spaces string' echo test" - env = {"JOBS": "5", "TEST": "spaces string"} - assert func(cmd, working_dir, env) == ret diff --git a/test/test_version.py b/test/test_version.py deleted file mode 100644 index e09dd04e..00000000 --- a/test/test_version.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import sys -import pytest - -import pmb_test -import pmb_test.const -import pmb.helpers.git -import pmb.helpers.logging -import 
pmb.parse.version - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_version(args): - # Fail after the first error or print a grand total of failures - keep_going = False - - # Iterate over the version tests from apk-tools - path = pmb_test.const.testdata + "/version/version.data" - mapping = {-1: "<", 0: "=", 1: ">"} - count = 0 - errors = [] - with open(path) as handle: - for line in handle: - split = line.split(" ") - a = split[0] - b = split[2].split("#")[0].rstrip() - expected = split[1] - print("(#" + str(count) + ") " + line.rstrip()) - result = pmb.parse.version.compare(a, b) - real = mapping[result] - - count += 1 - if real != expected: - if keep_going: - errors.append(line.rstrip() + " (got: '" + real + "')") - else: - assert real == expected - - print("---") - print("total: " + str(count)) - print("errors: " + str(len(errors))) - print("---") - for error in errors: - print(error) - assert errors == [] - - -def test_version_check_string(): - func = pmb.parse.version.check_string - assert func("3.2.4", ">=0.0.0") is True - assert func("3.2.4", ">=3.2.4") is True - assert func("3.2.4", "<4.0.0") is True - - assert func("0.0.0", ">=0.0.1") is False - assert func("4.0.0", "<4.0.0") is False - assert func("4.0.1", "<4.0.0") is False - - assert func("5.2.0_rc3", "<5.2.0") is False - assert func("5.2.0_rc3", ">=5.2.0") is True diff --git a/test/test_version_validate.py b/test/test_version_validate.py deleted file mode 100644 index 5f5b9c6b..00000000 --- a/test/test_version_validate.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -import pmb_test # noqa -import pmb.parse.version - - -def test_version_validate(): - func = pmb.parse.version.validate - - assert func("6.0_1") is False - assert func("6.0_invalidsuffix1") is False - assert func("6.0.0002") is True - assert func("6.0.234") is True - - # Issue #1144 - assert func("6.0_0002") is False diff --git a/test/test_zzz_keys.py b/test/test_zzz_keys.py deleted file mode 100644 index 13b37cb9..00000000 --- a/test/test_zzz_keys.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2023 Oliver Smith -# SPDX-License-Identifier: GPL-3.0-or-later -# This file has a _zzz_ prefix so it runs last, because it tends to fail on -# sourcehut currently. Related to some CDN caching issue probably. 
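For reference, each line of apk-tools' version.data consumed by test_version above has the shape "a OP b", optionally followed by a "#" comment. A minimal standalone sketch of that parsing, with a hypothetical compare() callable standing in for pmb.parse.version.compare() (which returns -1, 0 or 1); parse_line() and check() are illustrative names only:

    # Sketch: mirrors the line handling in test_version above.
    def parse_line(line):
        split = line.split(" ")
        a = split[0]
        expected = split[1]                  # one of "<", "=", ">"
        b = split[2].split("#")[0].rstrip()  # drop trailing comment, newline
        return a, expected, b

    def check(line, compare):
        mapping = {-1: "<", 0: "=", 1: ">"}
        a, expected, b = parse_line(line)
        return mapping[compare(a, b)] == expected

For example, check("1.0 < 1.1", compare) is True whenever compare() orders the two versions the same way apk-tools does.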
-import os -import sys -import pytest -import glob -import filecmp - -import pmb_test # noqa -import pmb.parse.apkindex -import pmb.helpers.logging -import pmb.config - - -@pytest.fixture -def args(request): - import pmb.parse - sys.argv = ["pmbootstrap.py", "chroot"] - args = pmb.parse.arguments() - args.log = args.work + "/log_testsuite.txt" - pmb.helpers.logging.init(args) - request.addfinalizer(pmb.helpers.logging.logfd.close) - return args - - -def test_keys(args): - # Get the alpine-keys apk filename - pmb.chroot.init(args) - version = pmb.parse.apkindex.package(args, "alpine-keys")["version"] - pattern = (args.work + "/cache_apk_" + pmb.config.arch_native + - "/alpine-keys-" + version + ".*.apk") - filename = os.path.basename(glob.glob(pattern)[0]) - - # Extract it to a temporary folder - temp = "/tmp/test_keys_extract" - temp_outside = args.work + "/chroot_native" + temp - if os.path.exists(temp_outside): - pmb.chroot.root(args, ["rm", "-r", temp]) - pmb.chroot.user(args, ["mkdir", "-p", temp]) - pmb.chroot.user(args, ["tar", "xvf", "/var/cache/apk/" + filename], - working_dir=temp) - - # Get all relevant key file names as {"filename": "full_outside_path"} - keys_upstream = {} - for arch in pmb.config.build_device_architectures + ["x86_64"]: - pattern = temp_outside + "/usr/share/apk/keys/" + arch + "/*.pub" - for path in glob.glob(pattern): - keys_upstream[os.path.basename(path)] = path - assert len(keys_upstream) - - # Check if the keys are mirrored correctly - mirror_path_keys = pmb.config.apk_keys_path - for key, original_path in keys_upstream.items(): - mirror_path = mirror_path_keys + "/" + key - assert filecmp.cmp(mirror_path, original_path, False) - - # Find postmarketOS keys - keys_pmos = ["build.postmarketos.org.rsa.pub"] - for key in keys_pmos: - assert os.path.exists(mirror_path_keys + "/" + key) - - # Find outdated keys, which need to be removed - glob_result = glob.glob(mirror_path_keys + "/*.pub") - assert len(glob_result) - for path in glob_result: - key = os.path.basename(key) - assert key in keys_pmos or key in keys_upstream diff --git a/test/testdata/apkbuild/APKBUILD.depends-in-depends b/test/testdata/apkbuild/APKBUILD.depends-in-depends deleted file mode 100644 index c7f5d88a..00000000 --- a/test/testdata/apkbuild/APKBUILD.depends-in-depends +++ /dev/null @@ -1,8 +0,0 @@ -pkgname="depends-in-depends" -pkgver="1.0.0" -pkgrel=0 -arch="armhf" -depends="first" -depends="$depends second" -depends="${depends} third" -pkgdesc="depends-in-depends test" diff --git a/test/testdata/apkbuild/APKBUILD.lint b/test/testdata/apkbuild/APKBUILD.lint deleted file mode 100644 index 8030e7cc..00000000 --- a/test/testdata/apkbuild/APKBUILD.lint +++ /dev/null @@ -1,33 +0,0 @@ -# APKBUILD to test 'pmbootstrap lint', used by test/test_helpers_lint.py -# Maintainer: Oliver Smith -# Co-Maintainer: Hello World -pkgname=hello-world -pkgver=1 -pkgrel=5 -pkgdesc="hello world program to be built in the testsuite" -url="https://en.wikipedia.org/wiki/%22Hello,_World!%22_program" -arch="all" -license="MIT" -source="main.c Makefile" - -# has pmbootstrap specific options (https://postmarketos.org/apkbuild-options) -options="!tracedeps pmb:cross-native pmb:strict !archcheck" - -build() { - cd "$srcdir" - make -} - -check() { - cd "$srcdir" - printf 'hello, world!\n' > expected - ./hello-world > real - diff -q expected real -} - -package() { - install -D -m755 "$srcdir"/hello-world \ - "$pkgdir"/usr/bin/hello-world -} 
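The mirror check in test_keys above reduces to comparing two directories of .pub files: every upstream key must have an identical copy under pmb.config.apk_keys_path, and any mirrored key that is neither upstream nor a known postmarketOS key is outdated. A minimal sketch with hypothetical names (check_mirror, extra_ok); note that the key name is derived from the glob path here — the original loop's key = os.path.basename(key) looks like a typo for os.path.basename(path):

    import filecmp
    import glob
    import os

    def check_mirror(keys_upstream, mirror_dir, extra_ok=()):
        # keys_upstream: {"filename.rsa.pub": "full_outside_path", ...}
        for name, original in keys_upstream.items():
            assert filecmp.cmp(os.path.join(mirror_dir, name), original, False)
        # Every mirrored key must be either an upstream key or explicitly
        # allowed (e.g. the postmarketOS build key), otherwise it is outdated
        # and should be removed.
        for path in glob.glob(os.path.join(mirror_dir, "*.pub")):
            name = os.path.basename(path)
            assert name in keys_upstream or name in extra_ok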
-sha512sums="62385af6a68cd4e0c03b15992bb9f1d20b8d6c8a33724ca2d28629a139e95016d0502257f8a3a8be53eef30e11b3e372a2469cb1989dbd387ebea4464a9273ee main.c -80c32948d3254f5e4f9084d73754824e7d7d7d117770b041a1a13baf056773de265153fe518cc3e735db55b638411aa6fbd0e17b5b674dfc89e69a9391fbd3bb Makefile" diff --git a/test/testdata/apkbuild/APKBUILD.linux-envkernel-test b/test/testdata/apkbuild/APKBUILD.linux-envkernel-test deleted file mode 100644 index 4166c917..00000000 --- a/test/testdata/apkbuild/APKBUILD.linux-envkernel-test +++ /dev/null @@ -1,20 +0,0 @@ -pkgname="linux-envkernel-test" - -package() { - install -Dm644 "$srcdir"/build/arch/arm/boot/dt.img \ - "$pkgdir"/boot/dt.img - - install -Dm644 "$srcdir"/build/arch/arm/boot/zImage-dtb \ - "$pkgdir"/boot/vmlinuz-$_flavor - - install -D "$srcdir"/build/include/config/kernel.release \ - "$pkgdir"/usr/share/kernel/$_flavor/kernel.release - - cd "$srcdir"/build - unset LDFLAGS - - echo "--[ Installing modules ]--" - make ARCH="$_carch" CC="${CC:-gcc}" \ - KBUILD_BUILD_VERSION="$((pkgrel + 1))-Alpine" CONFIG_NO_ERROR_ON_MISMATCH=y \ - INSTALL_MOD_PATH="$pkgdir" INSTALL_MOD_STRIP=1 modules_install -} diff --git a/test/testdata/apkbuild/APKBUILD.missing-pkgdesc-in-subpackage b/test/testdata/apkbuild/APKBUILD.missing-pkgdesc-in-subpackage deleted file mode 100644 index 280da8ed..00000000 --- a/test/testdata/apkbuild/APKBUILD.missing-pkgdesc-in-subpackage +++ /dev/null @@ -1,10 +0,0 @@ -# Reference: -# Unmaintained: This is broken! -pkgname="missing-pkgdesc-in-subpackage" -arch="noarch" -subpackages="$pkgname-subpackage invalid-function:does_not_exist" - -subpackage() { - # this function does not have a pkgdesc - mkdir "$subpkgdir" -} diff --git a/test/testdata/apkbuild/APKBUILD.subpackages b/test/testdata/apkbuild/APKBUILD.subpackages deleted file mode 100644 index 5570604e..00000000 --- a/test/testdata/apkbuild/APKBUILD.subpackages +++ /dev/null @@ -1,13 +0,0 @@ -pkgname="subpackages" -arch="noarch" -subpackages="simple custom:custom_function" -depends="postmarketos-base" - -simple() { - mkdir "$subpkgdir" -} - -custom_function() { - pkgdesc="This is one of the custom $pkgname" - depends="$depends glibc" -} diff --git a/test/testdata/apkbuild/APKBUILD.variable-replacements b/test/testdata/apkbuild/APKBUILD.variable-replacements deleted file mode 100644 index c180e45c..00000000 --- a/test/testdata/apkbuild/APKBUILD.variable-replacements +++ /dev/null @@ -1,12 +0,0 @@ -pkgname="variable-replacements" -pkgver="1.0.0" -pkgrel=0 -arch="armhf" -pkgdesc="$pkgdesc$pkgname test" -url="${pkgname/variable-} ${pkgname/-replacements/} ${pkgname/variable/string}" -subpackages="${pkgdesc#variable-}:test_subpkg_func" -pkgdesc="this should not affect variable replacement" - -test_subpkg_func() { - mkdir "$subpkgdir" -} diff --git a/test/testdata/apkbuild/APKBUILD.weird-pkgver b/test/testdata/apkbuild/APKBUILD.weird-pkgver deleted file mode 100644 index a4754416..00000000 --- a/test/testdata/apkbuild/APKBUILD.weird-pkgver +++ /dev/null @@ -1,8 +0,0 @@ -pkgname=dart-stage0 -pkgver=3.0.0_alpha369-r0 -pkgrel=1 -pkgdesc="Dart is a client-optimized language for fast apps on any platform (temporary bootstrap package)" -url="https://dart.dev/" -arch="aarch64 armv7 x86_64" -license="BSD-3-Clause" -options="!check" diff --git a/test/testdata/apkindex/conflict b/test/testdata/apkindex/conflict deleted file mode 100644 index ebf3b827..00000000 --- a/test/testdata/apkindex/conflict +++ /dev/null @@ -1,20 +0,0 @@ -C:Q1XaZzCVZ9mvH8djPyEb5aUYhG3r4= -P:hello-world -V:2-r0 -A:x86_64 
-S:2897 -I:20480 -T:hello world program to be built in the testsuite -U:https://en.wikipedia.org/wiki/%22Hello,_World!%22_program -L:MIT -o:hello-world -t:1500000000 -c: -D:!conflict so:libc.musl-x86_64.so.1 -p:cmd:hello-world -F:usr -F:usr/bin -R:hello-world -a:0:0:755 -Z:Q1ZjTpsnMchSsSwEPB1cTjihYuJvo= - diff --git a/test/testdata/apkindex/key_missing b/test/testdata/apkindex/key_missing deleted file mode 100644 index c0998966..00000000 --- a/test/testdata/apkindex/key_missing +++ /dev/null @@ -1,23 +0,0 @@ -C:Q1gKkFdQUwKAmcUpGY8VaErq0uHNo= -P:musl -A:x86_64 -S:357094 -I:581632 -T:the musl c library (libc) implementation -U:http://www.musl-libc.org/ -L:MIT -o:musl -m:Timo Ter s -t:1515217616 -c:6cc1d4e6ac35607dd09003e4d013a0d9c4800c49 -p:so:libc.musl-x86_64.so.1=1 -F:lib -R:libc.musl-x86_64.so.1 -a:0:0:777 -Z:Q17yJ3JFNypA4mxhJJr0ou6CzsJVI= -R:ld-musl-x86_64.so.1 -a:0:0:755 -Z:Q1DadJ0cqdT+ImyeY5FgTdZWaLnyQ= -F:usr -F:usr/lib - diff --git a/test/testdata/apkindex/key_twice b/test/testdata/apkindex/key_twice deleted file mode 100644 index 467162fc..00000000 --- a/test/testdata/apkindex/key_twice +++ /dev/null @@ -1,25 +0,0 @@ -C:Q1gKkFdQUwKAmcUpGY8VaErq0uHNo= -P:musl -V:1.1.18-r5 -V:1.1.18-r5 -A:x86_64 -S:357094 -I:581632 -T:the musl c library (libc) implementation -U:http://www.musl-libc.org/ -L:MIT -o:musl -m:Timo Ter s -t:1515217616 -c:6cc1d4e6ac35607dd09003e4d013a0d9c4800c49 -p:so:libc.musl-x86_64.so.1=1 -F:lib -R:libc.musl-x86_64.so.1 -a:0:0:777 -Z:Q17yJ3JFNypA4mxhJJr0ou6CzsJVI= -R:ld-musl-x86_64.so.1 -a:0:0:755 -Z:Q1DadJ0cqdT+ImyeY5FgTdZWaLnyQ= -F:usr -F:usr/lib - diff --git a/test/testdata/apkindex/new_line_missing b/test/testdata/apkindex/new_line_missing deleted file mode 100644 index 7f907123..00000000 --- a/test/testdata/apkindex/new_line_missing +++ /dev/null @@ -1,23 +0,0 @@ -C:Q1gKkFdQUwKAmcUpGY8VaErq0uHNo= -P:musl -V:1.1.18-r5 -A:x86_64 -S:357094 -I:581632 -T:the musl c library (libc) implementation -U:http://www.musl-libc.org/ -L:MIT -o:musl -m:Timo Ter s -t:1515217616 -c:6cc1d4e6ac35607dd09003e4d013a0d9c4800c49 -p:so:libc.musl-x86_64.so.1=1 -F:lib -R:libc.musl-x86_64.so.1 -a:0:0:777 -Z:Q17yJ3JFNypA4mxhJJr0ou6CzsJVI= -R:ld-musl-x86_64.so.1 -a:0:0:755 -Z:Q1DadJ0cqdT+ImyeY5FgTdZWaLnyQ= -F:usr -F:usr/lib diff --git a/test/testdata/apkindex/no_error b/test/testdata/apkindex/no_error deleted file mode 100644 index c5a0c3d8..00000000 --- a/test/testdata/apkindex/no_error +++ /dev/null @@ -1,45 +0,0 @@ -C:Q1gKkFdQUwKAmcUpGY8VaErq0uHNo= -P:musl -V:1.1.18-r5 -A:x86_64 -S:357094 -I:581632 -T:the musl c library (libc) implementation -U:http://www.musl-libc.org/ -L:MIT -o:musl -m:Timo Ter s -t:1515217616 -c:6cc1d4e6ac35607dd09003e4d013a0d9c4800c49 -p:so:libc.musl-x86_64.so.1=1 -F:lib -R:libc.musl-x86_64.so.1 -a:0:0:777 -Z:Q17yJ3JFNypA4mxhJJr0ou6CzsJVI= -R:ld-musl-x86_64.so.1 -a:0:0:755 -Z:Q1DadJ0cqdT+ImyeY5FgTdZWaLnyQ= -F:usr -F:usr/lib - -C:Q1iundrWyXyQtSTZ9h2qqh44cZcYA= -P:curl -V:7.57.0-r0 -A:x86_64 -S:118233 -I:217088 -T:An URL retrieval utility and library -U:http://curl.haxx.se -L:MIT -o:curl -m:Natanael Copa -t:1512030418 -c:d19c5b26c70a3055c5d6c7d2f15587f62a33a1fe -D:ca-certificates so:libc.musl-x86_64.so.1 so:libcurl.so.4 so:libz.so.1 -p:cmd:curl -F:usr -F:usr/bin -R:curl -a:0:0:755 -Z:Q1tlqDmZcIJJXo+ScFT6Nd31EPrBM= - diff --git a/test/testdata/apkindex/virtual_package b/test/testdata/apkindex/virtual_package deleted file mode 100644 index fa2f24d1..00000000 --- a/test/testdata/apkindex/virtual_package +++ /dev/null @@ -1,31 +0,0 @@ -C:Q1XaZzCVZ9mvH8djPyEb5aUYhG3r4= 
-P:hello-world -V:2-r0 -A:x86_64 -S:2897 -I:20480 -T:hello world program to be built in the testsuite -U:https://en.wikipedia.org/wiki/%22Hello,_World!%22_program -L:MIT -o:hello-world -t:1500000000 -c: -D:so:libc.musl-x86_64.so.1 -p:cmd:hello-world -F:usr -F:usr/bin -R:hello-world -a:0:0:755 -Z:Q1ZjTpsnMchSsSwEPB1cTjihYuJvo= - -C:Q127l1Ui9vzedbeR3BMelZnSa4pwY= -P:.pmbootstrap -V:0 -A:noarch -S:0 -I:0 -T:virtual meta package -U: -L: -D:hello-world - diff --git a/test/testdata/aportgen/aports/main/binutils/APKBUILD b/test/testdata/aportgen/aports/main/binutils/APKBUILD deleted file mode 100644 index 01a4003c..00000000 --- a/test/testdata/aportgen/aports/main/binutils/APKBUILD +++ /dev/null @@ -1,143 +0,0 @@ -# Contributor: Natanael Copa -# Maintainer: Ariadne Conill -pkgname=binutils -pkgver=2.39 -pkgrel=1 -pkgdesc="Tools necessary to build programs" -url="https://www.gnu.org/software/binutils/" -makedepends_build="bison flex texinfo" -makedepends_host="zlib-dev" -makedepends="$makedepends_build $makedepends_host" -arch="all" -license="GPL-2.0 GPL-3.0-or-later LGPL-2.0 BSD" -subpackages="$pkgname-dev $pkgname-doc" -source="https://ftp.gnu.org/gnu/binutils/binutils-$pkgver.tar.xz - binutils-ld-fix-static-linking.patch - gold-mips.patch - ld-bfd-mips.patch - 0001-Revert-PR25882-.gnu.attributes-are-not-checked-for-s.patch - binutils-mips-disable-assert.patch - " -builddir="$srcdir/$pkgname-$pkgver" - -if [ "$CHOST" = "$CBUILD" ] && [ "$CBUILD" = "$CTARGET" ] && [ "$CTARGET_ARCH" != "riscv64" ]; then - subpackages="$subpackages $pkgname-gold" -fi - -if [ "$CHOST" != "$CTARGET" ]; then - pkgname="$pkgname-$CTARGET_ARCH" - subpackages="" - sonameprefix="$pkgname:" -fi - -# secfixes: -# 2.35.2-r1: -# - CVE-2021-3487 -# 2.32-r0: -# - CVE-2018-19931 -# - CVE-2018-19932 -# - CVE-2018-20002 -# - CVE-2018-20712 -# 2.28-r1: -# - CVE-2017-7614 - -build() { - local _sysroot=/ - local _cross_configure="--enable-install-libiberty --enable-shared" - local _arch_configure="" - local _gold_configure="--disable-gold" - local _plugin_configure="--enable-plugins" - - if [ "$CHOST" != "$CTARGET" ]; then - _sysroot="$CBUILDROOT" - _cross_configure="--disable-install-libiberty" - _plugin_configure="--disable-plugins" - fi - - if [ "$CHOST" = "$CBUILD" ] && [ "$CBUILD" = "$CTARGET" ] && [ "$CTARGET_ARCH" != "riscv64" ]; then - _gold_configure="--enable-gold" - fi - - if [ "$CTARGET_ARCH" = "x86_64" ]; then - _arch_configure="--enable-targets=x86_64-pep" - fi - - if [ "$CTARGET_ARCH" = "riscv64" ]; then - _gold_configure="--disable-gold" - fi - - case "$CTARGET_ARCH" in - mips*) _hash_style_configure="--enable-default-hash-style=sysv" ;; - *) _hash_style_configure="--enable-default-hash-style=gnu" ;; - esac - - ./configure \ - --build=$CBUILD \ - --host=$CHOST \ - --target=$CTARGET \ - --with-build-sysroot="$CBUILDROOT" \ - --with-sysroot=$_sysroot \ - --prefix=/usr \ - --mandir=/usr/share/man \ - --infodir=/usr/share/info \ - --disable-multilib \ - --disable-gprofng \ - --enable-ld=default \ - $_gold_configure \ - --enable-64-bit-bfd \ - $_plugin_configure \ - --enable-relro \ - --enable-deterministic-archives \ - --enable-default-execstack=no \ - $_cross_configure \ - $_arch_configure \ - $_hash_style_configure \ - --with-pic \ - --disable-werror \ - --disable-nls \ - --with-mmap \ - --with-system-zlib - make -} - -package() { - make install DESTDIR="$pkgdir" - if [ -d "$pkgdir"/usr/lib64 ]; then - mv "$pkgdir"/usr/lib64/* "$pkgdir"/usr/lib/ - rmdir "$pkgdir"/usr/lib64 - fi - if [ "$CHOST" != "$CTARGET" ]; then - # 
creating cross tools: remove any files that would conflict - # with the native tools, or other cross tools - rm -r "${pkgdir:?}"/usr/share - rm -f "$pkgdir"/usr/lib/libiberty.a - rm -r "${pkgdir:?}"/usr/lib/bfd-plugins - fi -} - -libs() { - pkgdesc="Runtime libraries from binutils - libbfd and libopcodes" - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/lib/lib*.so "$subpkgdir"/usr/lib/ -} - -gold() { - pkgdesc="GNU binutils - gold linker" - - if [ -e "$pkgdir"/usr/bin/ld.gold ]; then - mkdir -p "$subpkgdir"/usr/bin - mv "$pkgdir"/usr/bin/ld.gold "$subpkgdir"/usr/bin - fi - mkdir -p "$subpkgdir"/usr/$CTARGET/bin - mv "$pkgdir"/usr/$CTARGET/bin/ld.gold "$subpkgdir"/usr/$CTARGET/bin/ld.gold -} - -sha512sums=" -68e038f339a8c21faa19a57bbc447a51c817f47c2e06d740847c6e9cc3396c025d35d5369fa8c3f8b70414757c89f0e577939ddc0d70f283182504920f53b0a3 binutils-2.39.tar.xz -ecee33b0e435aa704af1c334e560f201638ff79e199aa11ed78a72f7c9b46f85fbb227af5748e735fd681d1965fcc42ac81b0c8824e540430ce0c706c81e8b49 binutils-ld-fix-static-linking.patch -f55cf2e0bf82f97583a1abe10710e4013ecf7d64f1da2ef8659a44a06d0dd8beaf58dab98a183488ea137f03e32d62efc878d95f018f836f8cec870bc448556f gold-mips.patch -314d2ef9071c89940aa6c8118e8a1e2f191a5d0a4bf596da1ad9cc84f884d8bc7dea8bd7b9fc3f8f1bddd3fd41c6eb017e1e804044b3bf084df1ed9e6e095e2d ld-bfd-mips.patch -70ec22bd72ef6dddecfd970613387dd4a8cdc8730dd3cbf03d5a0c3a7c4d839383167bb06dad21bf7c235329fd44b5dc4aefe762f68544f17155cf002bf1be4a 0001-Revert-PR25882-.gnu.attributes-are-not-checked-for-s.patch -609cd90d8b334eb309f586b17b9d335a08d3dbb6def7c3eb5c010028fcb681674031e5b9d853aa7a39a50304356a86afc184b85562b3f228f8197f4d29395c8f binutils-mips-disable-assert.patch -" diff --git a/test/testdata/aportgen/aports/main/gcc/APKBUILD b/test/testdata/aportgen/aports/main/gcc/APKBUILD deleted file mode 100644 index d31b99f8..00000000 --- a/test/testdata/aportgen/aports/main/gcc/APKBUILD +++ /dev/null @@ -1,792 +0,0 @@ -# Contributor: Natanael Copa -# Contributor: Sören Tempel -# Maintainer: Ariadne Conill -pkgname=gcc -_pkgbase=12.2.1 # must match gcc/BASE-VER -_pkgsnap=20220924 -pkgver=${_pkgbase}_git${_pkgsnap} -[ "$BOOTSTRAP" = "nolibc" ] && pkgname="gcc-pass2" -[ "$CBUILD" != "$CHOST" ] && _cross="-$CARCH" || _cross="" -[ "$CHOST" != "$CTARGET" ] && _target="-$CTARGET_ARCH" || _target="" - -pkgname="$pkgname$_target" -pkgrel=1 -pkgdesc="The GNU Compiler Collection" -url="https://gcc.gnu.org" -arch="all" -license="GPL-2.0-or-later LGPL-2.1-or-later" -_gccrel=$pkgver-r$pkgrel -depends="binutils$_target" -makedepends_build="gcc$_cross g++$_cross bison flex texinfo gawk zip gmp-dev mpfr-dev mpc1-dev zlib-dev" -makedepends_host="linux-headers gmp-dev mpfr-dev mpc1-dev isl-dev zlib-dev !gettext-dev libucontext-dev" -subpackages=" " -[ "$CHOST" = "$CTARGET" ] && subpackages="gcc-doc$_target" -replaces="libstdc++ binutils" - -: "${LANG_CXX:=true}" -: "${LANG_D:=true}" -: "${LANG_OBJC:=true}" -: "${LANG_GO:=true}" -: "${LANG_FORTRAN:=true}" -: "${LANG_ADA:=true}" -: "${LANG_JIT:=true}" - -_libgomp=true -_libgcc=true -_libatomic=true -_libitm=true - -if [ "$CHOST" != "$CTARGET" ]; then - if [ "$BOOTSTRAP" = nolibc ]; then - LANG_CXX=false - LANG_ADA=false - _libgcc=false - _builddir="$srcdir/build-cross-pass2" - else - _builddir="$srcdir/build-cross-final" - fi - LANG_OBJC=false - LANG_GO=false - LANG_FORTRAN=false - LANG_D=false - LANG_JIT=false - _libgomp=false - _libatomic=false - _libitm=false - - # reset target flags (should be set in crosscreate abuild) - # fixup flags. 
seems gcc treats CPPFLAGS as global without - # _FOR_xxx variants. wrap it in CFLAGS and CXXFLAGS. - export CFLAGS="$CPPFLAGS $CFLAGS" - export CXXFLAGS="$CPPFLAGS $CXXFLAGS" - unset CPPFLAGS - export CFLAGS_FOR_TARGET=" " - export CXXFLAGS_FOR_TARGET=" " - export LDFLAGS_FOR_TARGET=" " - - STRIP_FOR_TARGET="$CTARGET-strip" -elif [ "$CBUILD" != "$CHOST" ]; then - # fixup flags. seems gcc treats CPPFLAGS as global without - # _FOR_xxx variants. wrap it in CFLAGS and CXXFLAGS. - export CFLAGS="$CPPFLAGS $CFLAGS" - export CXXFLAGS="$CPPFLAGS $CXXFLAGS" - unset CPPFLAGS - - # reset flags and cc for build - export CC_FOR_BUILD="gcc" - export CXX_FOR_BUILD="g++" - export CFLAGS_FOR_BUILD=" " - export CXXFLAGS_FOR_BUILD=" " - export LDFLAGS_FOR_BUILD=" " - export CFLAGS_FOR_TARGET=" " - export CXXFLAGS_FOR_TARGET=" " - export LDFLAGS_FOR_TARGET=" " - - # Languages that do not need bootstrapping - LANG_OBJC=false - LANG_GO=false - LANG_FORTRAN=false - LANG_D=false - LANG_JIT=false - - STRIP_FOR_TARGET=${CROSS_COMPILE}strip - _builddir="$srcdir/build-cross-native" -else - STRIP_FOR_TARGET=${CROSS_COMPILE}strip - _builddir="$srcdir/build" -fi - -case "$CARCH" in -# GDC hasn't been ported to PowerPC -# See libphobos/configure.tgt in GCC sources for supported targets -# riscv fails with: error: static assert "unimplemented" -ppc64le|riscv64) LANG_D=false ;; -# GDC does currently not work on 32-bit musl architectures. -# This is a known upstream issue. -# See: https://github.com/dlang/druntime/pull/3383 -armhf|armv7|x86) LANG_D=false ;; -esac - -# libitm has TEXTRELs in ARM build, so disable for now -case "$CTARGET_ARCH" in -arm*) _libitm=false ;; -mips*) _libitm=false ;; -riscv64) _libitm=false ;; -esac - -# Internal libffi fails to build on MIPS at the moment, need to -# investigate further. We disable LANG_GO on mips64 as it requires -# the internal libffi. 
-case "$CTARGET_ARCH" in -mips*) LANG_GO=false ;; -esac - -# Fortran uses libquadmath if toolchain has __float128 -# currently on x86, x86_64 and ia64 -_libquadmath=$LANG_FORTRAN -case "$CTARGET_ARCH" in -x86 | x86_64) _libquadmath=$LANG_FORTRAN ;; -*) _libquadmath=false ;; -esac - -# libatomic is a dependency for openvswitch -$_libatomic && subpackages="$subpackages libatomic::$CTARGET_ARCH" -$_libgcc && subpackages="$subpackages libgcc::$CTARGET_ARCH" -$_libquadmath && subpackages="$subpackages libquadmath::$CTARGET_ARCH" -if $_libgomp; then - depends="$depends libgomp=$_gccrel" - subpackages="$subpackages libgomp::$CTARGET_ARCH" -fi - -case "$CARCH" in -riscv64) -LANG_ADA=false;; -esac - -_languages=c -if $LANG_CXX; then - subpackages="$subpackages libstdc++:libcxx:$CTARGET_ARCH libstdc++-dev$_target:libcxx_dev g++$_target:gpp" - _languages="$_languages,c++" -fi -if $LANG_D; then - subpackages="$subpackages libgphobos::$CTARGET_ARCH gcc-gdc$_target:gdc" - _languages="$_languages,d" - makedepends_build="$makedepends_build libucontext-dev gcc-gdc-bootstrap" -fi -if $LANG_OBJC; then - subpackages="$subpackages libobjc::$CTARGET_ARCH gcc-objc$_target:objc" - _languages="$_languages,objc" -fi -if $LANG_GO; then - subpackages="$subpackages libgo::$CTARGET_ARCH gcc-go$_target:go" - _languages="$_languages,go" -fi -if $LANG_FORTRAN; then - subpackages="$subpackages libgfortran::$CTARGET_ARCH gfortran$_target:gfortran" - _languages="$_languages,fortran" -fi -if $LANG_ADA; then - subpackages="$subpackages gcc-gnat$_target:gnat" - _languages="$_languages,ada" - if [ "$CBUILD" = "$CTARGET" ]; then - makedepends_build="$makedepends_build gcc-gnat-bootstrap" - subpackages="$subpackages libgnat-static:libgnatstatic:$CTARGET_ARCH libgnat::$CTARGET_ARCH" - else - subpackages="$subpackages libgnat::$CTARGET_ARCH" - makedepends_build="$makedepends_build gcc-gnat gcc-gnat$_cross" - fi -fi -if $LANG_JIT; then - subpackages="$subpackages libgccjit:jit libgccjit-dev:jitdev" -fi -makedepends="$makedepends_build $makedepends_host" - -# when using upstream releases, use this URI template -# https://gcc.gnu.org/pub/gcc/releases/gcc-${_pkgbase:-$pkgver}/gcc-${_pkgbase:-$pkgver}.tar.xz -# -# right now, we are using a git snapshot. snapshots are taken from gcc.gnu.org/pub/gcc/snapshots. -# However, since they are periodically deleted from the GCC mirrors the utilized snapshots are -# mirrored on dev.alpinelinux.org. Please ensure that the snapshot Git commit (as stated in the -# README) matches the base commit on the version-specific branch in the Git repository below. -# -# PLEASE submit all patches to gcc to https://gitlab.alpinelinux.org/kaniini/alpine-gcc-patches, -# so that they can be properly tracked and easily rebased if needed. 
-source="https://dev.alpinelinux.org/archive/gcc/${_pkgbase%%.*}-${_pkgsnap}/gcc-${_pkgbase%%.*}-${_pkgsnap}.tar.xz - 0001-posix_memalign.patch - 0002-gcc-poison-system-directories.patch - 0003-specs-turn-on-Wl-z-now-by-default.patch - 0004-Turn-on-D_FORTIFY_SOURCE-2-by-default-for-C-C-ObjC-O.patch - 0005-On-linux-targets-pass-as-needed-by-default-to-the-li.patch - 0006-Enable-Wformat-and-Wformat-security-by-default.patch - 0007-Enable-Wtrampolines-by-default.patch - 0008-Disable-ssp-on-nostdlib-nodefaultlibs-and-ffreestand.patch - 0009-Ensure-that-msgfmt-doesn-t-encounter-problems-during.patch - 0010-Don-t-declare-asprintf-if-defined-as-a-macro.patch - 0011-libiberty-copy-PIC-objects-during-build-process.patch - 0012-libitm-disable-FORTIFY.patch - 0013-libgcc_s.patch - 0014-nopie.patch - 0015-dlang-use-libucontext-on-mips64.patch - 0016-ada-fix-shared-linking.patch - 0017-build-fix-CXXFLAGS_FOR_BUILD-passing.patch - 0018-add-fortify-headers-paths.patch - 0019-Alpine-musl-package-provides-libssp_nonshared.a.-We-.patch - 0020-DP-Use-push-state-pop-state-for-gold-as-well-when-li.patch - 0021-mips64-disable-multilib-support.patch - 0022-aarch64-disable-multilib-support.patch - 0023-s390x-disable-multilib-support.patch - 0024-ppc64-le-disable-multilib-support.patch - 0025-x86_64-disable-multilib-support.patch - 0026-riscv-disable-multilib-support.patch - 0027-always-build-libgcc_eh.a.patch - 0028-ada-libgnarl-compatibility-for-musl.patch - 0029-ada-musl-support-fixes.patch - 0033-gcc-go-link-to-libucontext.patch - 0034-Use-generic-errstr.go-implementation-on-musl.patch - 0035-configure-Add-enable-autolink-libatomic-use-in-LINK_.patch - 0036-configure-fix-detection-of-atomic-builtins-in-libato.patch - 0037-libgo-Recognize-off64_t-and-loff_t-definitions-of-mu.patch - 0039-gcc-go-Use-int64-type-as-offset-argument-for-mmap.patch - 0041-go-gospec-forcibly-disable-fsplit-stack-support.patch - 0042-gcc-go-fix-build-error-with-SYS_SECCOMP.patch - 0043-libstdc-do-not-throw-exceptions-for-non-C-locales-on.patch - 0044-gdc-unconditionally-link-libgphobos-against-libucont.patch - 0045-druntime-link-against-libucontext-on-all-platforms.patch - 0049-libgo-adjust-name-of-union-in-sigevent-struct.patch - 0050-libphobos-don-t-define-__mode_t-twice-on-musl-target.patch - 0051-libgo-Explicitly-define-SYS_timer_settime-for-32-bit.patch - 0052-libgnat-time_t-is-always-64-bit-on-musl-libc.patch - 0053-libgo-make-match.sh-POSIX-shell-compatible.patch - " - -# we build out-of-tree -_gccdir="$srcdir"/gcc-${_pkgbase%%.*}-${_pkgsnap} -_gcclibdir="/usr/lib/gcc/$CTARGET/${_pkgbase:-$pkgver}" -_gcclibexec="/usr/libexec/gcc/$CTARGET/${_pkgbase:-$pkgver}" - -prepare() { - cd "$_gccdir" - - _err= - for i in $source; do - case "$i" in - *.patch) - msg "Applying $i" - patch -p1 -i "$srcdir"/$i || _err="$_err $i" - ;; - esac - done - - if [ -n "$_err" ]; then - error "The following patches failed:" - for i in $_err; do - echo " $i" - done - return 1 - fi - - echo ${_pkgbase:-$pkgver} > gcc/BASE-VER -} - -build() { - local _arch_configure= - local _libc_configure= - local _cross_configure= - local _bootstrap_configure= - local _symvers= - local _jit_configure= - - cd "$_gccdir" - - case "$CTARGET" in - aarch64-*-*-*) _arch_configure="--with-arch=armv8-a --with-abi=lp64";; - armv5-*-*-*eabi) _arch_configure="--with-arch=armv5te --with-tune=arm926ej-s --with-float=soft --with-abi=aapcs-linux";; - armv6-*-*-*eabihf) _arch_configure="--with-arch=armv6zk --with-tune=arm1176jzf-s --with-fpu=vfp --with-float=hard 
--with-abi=aapcs-linux";; - armv7-*-*-*eabihf) _arch_configure="--with-arch=armv7-a --with-tune=generic-armv7-a --with-fpu=vfpv3-d16 --with-float=hard --with-abi=aapcs-linux --with-mode=thumb";; - mips-*-*-*) _arch_configure="--with-arch=mips32 --with-mips-plt --with-float=soft --with-abi=32";; - mips64-*-*-*) _arch_configure="--with-arch=mips3 --with-tune=mips64 --with-mips-plt --with-float=soft --with-abi=64";; - mips64el-*-*-*) _arch_configure="--with-arch=mips3 --with-tune=mips64 --with-mips-plt --with-float=soft --with-abi=64";; - mipsel-*-*-*) _arch_configure="--with-arch=mips32 --with-mips-plt --with-float=soft --with-abi=32";; - powerpc-*-*-*) _arch_configure="--enable-secureplt --enable-decimal-float=no";; - powerpc64*-*-*-*) _arch_configure="--with-abi=elfv2 --enable-secureplt --enable-decimal-float=no --enable-targets=powerpcle-linux";; - i486-*-*-*) _arch_configure="--with-arch=i486 --with-tune=generic --enable-cld";; - i586-*-*-*) _arch_configure="--with-arch=i586 --with-tune=generic --enable-cld";; - s390x-*-*-*) _arch_configure="--with-arch=z196 --with-tune=zEC12 --with-zarch --with-long-double-128 --enable-decimal-float";; - riscv64-*-*-*) _arch_configure="--with-arch=rv64gc --with-abi=lp64d --enable-autolink-libatomic";; - esac - - case "$CTARGET_ARCH" in - mips*) _hash_style_configure="--with-linker-hash-style=sysv" ;; - *) _hash_style_configure="--with-linker-hash-style=gnu" ;; - esac - - case "$CTARGET_LIBC" in - musl) - # musl does not support mudflap, or libsanitizer - # libmpx uses secure_getenv and struct _libc_fpstate not present in musl - # alpine musl provides libssp_nonshared.a, so we don't need libssp either - _libc_configure="--disable-libssp --disable-libmpx --disable-libmudflap --disable-libsanitizer" - _symvers="--disable-symvers" - export libat_cv_have_ifunc=no - ;; - esac - - [ "$CBUILD" != "$CHOST" ] && _cross_configure="--disable-bootstrap" - [ "$CHOST" != "$CTARGET" ] && _cross_configure="--disable-bootstrap --with-sysroot=$CBUILDROOT" - - case "$BOOTSTRAP" in - nolibc) _bootstrap_configure="--with-newlib --disable-shared --enable-threads=no" ;; - *) _bootstrap_configure="--enable-shared --enable-threads --enable-tls" ;; - esac - - $_libgomp || _bootstrap_configure="$_bootstrap_configure --disable-libgomp" - $_libatomic || _bootstrap_configure="$_bootstrap_configure --disable-libatomic" - $_libitm || _bootstrap_configure="$_bootstrap_configure --disable-libitm" - $_libquadmath || _arch_configure="$_arch_configure --disable-libquadmath" - - msg "Building the following:" - echo "" - echo " CBUILD=$CBUILD" - echo " CHOST=$CHOST" - echo " CTARGET=$CTARGET" - echo " CTARGET_ARCH=$CTARGET_ARCH" - echo " CTARGET_LIBC=$CTARGET_LIBC" - echo " languages=$_languages" - echo " arch_configure=$_arch_configure" - echo " libc_configure=$_libc_configure" - echo " cross_configure=$_cross_configure" - echo " bootstrap_configure=$_bootstrap_configure" - echo " hash_style_configure=$_hash_style_configure" - echo "" - - export CFLAGS="$CFLAGS -O2" - export CXXFLAGS="$CXXFLAGS -O2" - export CPPFLAGS="$CPPFLAGS -O2" - - local version="Alpine $pkgver-r$pkgrel" - local gccconfiguration=" - --prefix=/usr - --mandir=/usr/share/man - --infodir=/usr/share/info - --build=${CBUILD} - --host=${CHOST} - --target=${CTARGET} - --enable-checking=release - --disable-fixed-point - --disable-libstdcxx-pch - --disable-multilib - --disable-nls - --disable-werror - $_symvers - --enable-__cxa_atexit - --enable-default-pie - --enable-default-ssp - --enable-cloog-backend - 
--enable-languages=$_languages - $_arch_configure - $_libc_configure - $_cross_configure - $_bootstrap_configure - --with-bugurl=https://gitlab.alpinelinux.org/alpine/aports/-/issues - --with-system-zlib - $_hash_style_configure - " - - mkdir -p "$_builddir" - cd "$_builddir" - "$_gccdir"/configure $gccconfiguration \ - --with-pkgversion="$version" - - msg "building gcc" - make - - # we build gccjit separate to not build all of gcc with --enable-host-shared - # as doing so slows it down a few %, so for some quick if's here we gain - # free performance - if $LANG_JIT; then - mkdir -p "$_builddir"/libgccjit-build - cd "$_builddir"/libgccjit-build - "$_gccdir"/configure $gccconfiguration \ - --disable-bootstrap \ - --enable-host-shared \ - --enable-languages=jit \ - --with-pkgversion="$version" - - msg "building libgccjit" - make all-gcc - fi -} - -package() { - cd "$_builddir" - make DESTDIR="$pkgdir" install - - ln -s gcc "$pkgdir"/usr/bin/cc - - if $LANG_JIT; then - make -C "$_builddir"/libgccjit-build/gcc DESTDIR="$pkgdir" jit.install-common - fi - - # we dont support gcj -static - # and saving 35MB is not bad. - find "$pkgdir" \( -name libgtkpeer.a \ - -o -name libgjsmalsa.a \ - -o -name libgij.a \) \ - -delete - - # strip debug info from some static libs - find "$pkgdir" \( -name libgfortran.a -o -name libobjc.a -o -name libgomp.a \ - -o -name libgphobos.a -o -name libgdruntime.a \ - -o -name libmudflap.a -o -name libmudflapth.a \ - -o -name libgcc.a -o -name libgcov.a -o -name libquadmath.a \ - -o -name libitm.a -o -name libgo.a -o -name libcaf\*.a \ - -o -name libatomic.a -o -name libasan.a -o -name libtsan.a \) \ - -a -type f \ - -exec ${STRIP_FOR_TARGET} -g {} + - - if $_libgomp; then - mv "$pkgdir"/usr/lib/libgomp.spec "$pkgdir"/$_gcclibdir - fi - if $_libitm; then - mv "$pkgdir"/usr/lib/libitm.spec "$pkgdir"/$_gcclibdir - fi - - # remove ffi - rm -f "$pkgdir"/usr/lib/libffi* "$pkgdir"/usr/share/man/man3/ffi* - find "$pkgdir" -name 'ffi*.h' -delete - - local gdblib=${_target:+$CTARGET/}lib - if [ -d "$pkgdir"/usr/$gdblib/ ]; then - for i in $(find "$pkgdir"/usr/$gdblib/ -type f -maxdepth 1 -name "*-gdb.py"); do - mkdir -p "$pkgdir"/usr/share/gdb/python/auto-load/usr/$gdblib - mv "$i" "$pkgdir"/usr/share/gdb/python/auto-load/usr/$gdblib/ - done - fi - - # move ada runtime libs - if $LANG_ADA; then - for i in $(find "$pkgdir"/$_gcclibdir/adalib/ -type f -maxdepth 1 -name "libgna*.so"); do - mv "$i" "$pkgdir"/usr/lib/ - ln -s ../../../../${i##*/} $i - done - if [ "$CHOST" = "$CTARGET" ]; then - for i in $(find "$pkgdir"/$_gcclibdir/adalib/ -type f -maxdepth 1 -name "libgna*.a"); do - mv "$i" "$pkgdir"/usr/lib/ - ln -s ../../../../${i##*/} $i - done - fi - fi - - if [ "$CHOST" != "$CTARGET" ]; then - # cross-gcc: remove any files that would conflict with the - # native gcc package - rm -rf "$pkgdir"/usr/bin/cc "$pkgdir"/usr/include "${pkgdir:?}"/usr/share - # libcc1 does not depend on target, don't ship it - rm -rf "$pkgdir"/usr/lib/libcc1.so* - - # fixup gcc library symlinks to be linker scripts so - # linker finds the libs from relocated sysroot - for so in "$pkgdir"/usr/"$CTARGET"/lib/*.so; do - if [ -h "$so" ]; then - local _real=$(basename "$(readlink "$so")") - rm -f "$so" - echo "GROUP ($_real)" > "$so" - fi - done - else - # add c89/c99 wrapper scripts - cat >"$pkgdir"/usr/bin/c89 <<'EOF' -#!/bin/sh -_flavor="-std=c89" -for opt; do - case "$opt" in - -ansi|-std=c89|-std=iso9899:1990) _flavor="";; - -std=*) echo "$(basename $0) called with non ANSI/ISO C option $opt" >&2 - exit 
1;; - esac -done -exec gcc $_flavor ${1+"$@"} -EOF - cat >"$pkgdir"/usr/bin/c99 <<'EOF' -#!/bin/sh -_flavor="-std=c99" -for opt; do - case "$opt" in - -std=c99|-std=iso9899:1999) _flavor="";; - -std=*) echo "$(basename $0) called with non ISO C99 option $opt" >&2 - exit 1;; - esac -done -exec gcc $_flavor ${1+"$@"} -EOF - chmod 755 "$pkgdir"/usr/bin/c?9 - - # install lto plugin so regular binutils may use it - mkdir -p "$pkgdir"/usr/lib/bfd-plugins - ln -s /$_gcclibexec/liblto_plugin.so "$pkgdir/usr/lib/bfd-plugins/" - fi -} - -libatomic() { - pkgdesc="GCC Atomic library" - depends= - replaces="gcc" - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/${_target:+$CTARGET/}lib/libatomic.so.* "$subpkgdir"/usr/lib/ -} - -libcxx() { - pkgdesc="GNU C++ standard runtime library" - depends= - - if [ "$CHOST" = "$CTARGET" ]; then - # verify that we are using clock_gettime rather than doing direct syscalls - # so we dont break 32 bit arches due to time64. - nm -D "$pkgdir"/usr/lib/libstdc++.so.* | grep clock_gettime - fi - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/${_target:+$CTARGET/}lib/libstdc++.so.* "$subpkgdir"/usr/lib/ -} - -libcxx_dev() { - pkgdesc="GNU C++ standard runtime library (development files)" - depends= - replaces="g++" - - amove usr/${_target:+$CTARGET/}lib/libstdc++.a \ - usr/${_target:+$CTARGET/}lib/libstdc++.so \ - usr/${_target:+$CTARGET/}lib/libstdc++fs.a \ - usr/${_target:+$CTARGET/}lib/libsupc++.a \ - usr/${_target:+$CTARGET/}include/c++ -} - -gpp() { - pkgdesc="GNU C++ standard library and compiler" - depends="libstdc++=$_gccrel libstdc++-dev$_target=$_gccrel gcc$_target=$_gccrel libc-dev" - mkdir -p "$subpkgdir/$_gcclibexec" \ - "$subpkgdir"/usr/bin \ - "$subpkgdir"/usr/${_target:+$CTARGET/}include \ - "$subpkgdir"/usr/${_target:+$CTARGET/}lib \ - - mv "$pkgdir/$_gcclibexec/cc1plus" "$subpkgdir/$_gcclibexec/" - - mv "$pkgdir"/usr/bin/*++ "$subpkgdir"/usr/bin/ -} - -jit() { - pkgdesc="GCC JIT Library" - depends= - amove usr/lib/libgccjit.so* -} - -jitdev() { - pkgdesc="GCC JIT Library (development files)" - depends="libgccjit" - amove usr/include/libgccjit*.h -} - -libobjc() { - pkgdesc="GNU Objective-C runtime" - replaces="objc" - depends= - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/${_target:+$CTARGET/}lib/libobjc.so.* "$subpkgdir"/usr/lib/ -} - -objc() { - pkgdesc="GNU Objective-C" - replaces="gcc" - depends="libc-dev gcc=$_gccrel libobjc=$_gccrel" - - mkdir -p "$subpkgdir/$_gcclibexec" \ - "$subpkgdir"/$_gcclibdir/include \ - "$subpkgdir"/usr/lib - mv "$pkgdir/$_gcclibexec/cc1obj" "$subpkgdir/$_gcclibexec/" - mv "$pkgdir"/$_gcclibdir/include/objc "$subpkgdir"/$_gcclibdir/include/ - mv "$pkgdir"/usr/lib/libobjc.so "$pkgdir"/usr/lib/libobjc.a \ - "$subpkgdir"/usr/lib/ -} - -libgcc() { - pkgdesc="GNU C compiler runtime libraries" - depends= - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/${_target:+$CTARGET/}lib/libgcc_s.so.* "$subpkgdir"/usr/lib/ -} - -libgomp() { - pkgdesc="GCC shared-memory parallel programming API library" - depends= - replaces="gcc" - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/${_target:+$CTARGET/}lib/libgomp.so.* "$subpkgdir"/usr/lib/ -} - -libgphobos() { - pkgdesc="D programming language standard library for GCC" - depends= - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/lib/libgdruntime.so.* "$subpkgdir"/usr/lib/ - mv "$pkgdir"/usr/lib/libgphobos.so.* "$subpkgdir"/usr/lib/ -} - -gdc() { - pkgdesc="GCC-based D language compiler" - depends="gcc=$_gccrel libgphobos=$_gccrel musl-dev" - depends="$depends 
libucontext-dev" - provides="gcc-gdc-bootstrap=$_gccrel" - - mkdir -p "$subpkgdir/$_gcclibexec" \ - "$subpkgdir"/$_gcclibdir/include/d/ \ - "$subpkgdir"/usr/lib \ - "$subpkgdir"/usr/bin - # Copy: The installed '.d' files, the static lib, the binary itself - # The shared libs are part of 'libgphobos' so one can run program - # without installing the compiler - mv "$pkgdir/$_gcclibexec/d21" "$subpkgdir/$_gcclibexec/" - mv "$pkgdir"/$_gcclibdir/include/d/* "$subpkgdir"/$_gcclibdir/include/d/ - mv "$pkgdir"/usr/lib/libgdruntime.a "$subpkgdir"/usr/lib/ - mv "$pkgdir"/usr/lib/libgdruntime.so "$subpkgdir"/usr/lib/ - mv "$pkgdir"/usr/lib/libgphobos.a "$subpkgdir"/usr/lib/ - mv "$pkgdir"/usr/lib/libgphobos.so "$subpkgdir"/usr/lib/ - mv "$pkgdir"/usr/lib/libgphobos.spec "$subpkgdir"/usr/lib/ - mv "$pkgdir"/usr/bin/$CTARGET-gdc "$subpkgdir"/usr/bin/ - mv "$pkgdir"/usr/bin/gdc "$subpkgdir"/usr/bin/ -} - - -libgo() { - pkgdesc="Go runtime library for GCC" - depends= - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/lib/libgo.so.* "$subpkgdir"/usr/lib/ -} - -go() { - pkgdesc="Go support for GCC" - depends="gcc=$_gccrel libgo=$_gccrel !go" - - mkdir -p "$subpkgdir"/$_gcclibexec \ - "$subpkgdir"/usr/lib \ - "$subpkgdir"/usr/bin - mv "$pkgdir"/usr/lib/go "$subpkgdir"/usr/lib/ - mv "$pkgdir"/usr/bin/*gccgo "$subpkgdir"/usr/bin/ - mv "$pkgdir"/usr/bin/*go "$subpkgdir"/usr/bin - mv "$pkgdir"/usr/bin/*gofmt "$subpkgdir"/usr/bin - mv "$pkgdir"/$_gcclibexec/go1 "$subpkgdir"/$_gcclibexec/ - mv "$pkgdir"/$_gcclibexec/cgo "$subpkgdir"/$_gcclibexec/ - mv "$pkgdir"/$_gcclibexec/buildid "$subpkgdir"/$_gcclibexec/ - mv "$pkgdir"/$_gcclibexec/test2json "$subpkgdir"/$_gcclibexec/ - mv "$pkgdir"/$_gcclibexec/vet "$subpkgdir"/$_gcclibexec/ - mv "$pkgdir"/usr/lib/libgo.a \ - "$pkgdir"/usr/lib/libgo.so \ - "$pkgdir"/usr/lib/libgobegin.a \ - "$pkgdir"/usr/lib/libgolibbegin.a \ - "$subpkgdir"/usr/lib/ -} - -libgfortran() { - pkgdesc="Fortran runtime library for GCC" - depends= - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/lib/libgfortran.so.* "$subpkgdir"/usr/lib/ -} - -libquadmath() { - replaces="gcc" - pkgdesc="128-bit math library for GCC" - depends= - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/lib/libquadmath.so.* "$subpkgdir"/usr/lib/ -} - -gfortran() { - pkgdesc="GNU Fortran Compiler" - depends="gcc=$_gccrel libgfortran=$_gccrel" - $_libquadmath && depends="$depends libquadmath=$_gccrel" - replaces="gcc" - - mkdir -p "$subpkgdir"/$_gcclibexec \ - "$subpkgdir"/$_gcclibdir \ - "$subpkgdir"/usr/lib \ - "$subpkgdir"/usr/bin - mv "$pkgdir"/usr/bin/*gfortran "$subpkgdir"/usr/bin/ - mv "$pkgdir"/usr/lib/libgfortran.a \ - "$pkgdir"/usr/lib/libgfortran.so \ - "$subpkgdir"/usr/lib/ - if $_libquadmath; then - mv "$pkgdir"/usr/lib/libquadmath.a \ - "$pkgdir"/usr/lib/libquadmath.so \ - "$subpkgdir"/usr/lib/ - fi - mv "$pkgdir"/$_gcclibdir/finclude "$subpkgdir"/$_gcclibdir/ - mv "$pkgdir"/$_gcclibexec/f951 "$subpkgdir"/$_gcclibexec - mv "$pkgdir"/usr/lib/libgfortran.spec "$subpkgdir"/$_gcclibdir -} - -libgnat() { - pkgdesc="GNU Ada runtime shared libraries" - depends= - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/lib/libgna*.so "$subpkgdir"/usr/lib/ -} - -libgnatstatic() { - pkgdesc="GNU Ada static libraries" - depends= - - mkdir -p "$subpkgdir"/usr/lib - mv "$pkgdir"/usr/lib/libgna*.a "$subpkgdir"/usr/lib/ -} - -gnat() { - pkgdesc="Ada support for GCC" - depends="gcc=$_gccrel" - provides="$pkgname-gnat-bootstrap=$_gccrel" - [ "$CHOST" = "$CTARGET" ] && depends="$depends libgnat=$_gccrel" - - mkdir -p 
"$subpkgdir"/$_gcclibexec \ - "$subpkgdir"/$_gcclibdir \ - "$subpkgdir"/usr/bin - mv "$pkgdir"/$_gcclibexec/*gnat* "$subpkgdir"/$_gcclibexec/ - mv "$pkgdir"/$_gcclibdir/*ada* "$subpkgdir"/$_gcclibdir/ - mv "$pkgdir"/usr/bin/*gnat* "$subpkgdir"/usr/bin/ -} - -sha512sums=" -ba4d9e73d108088da26fbefe18d9b245b76771ffe752c2b4b31bdf38a2d0b638fbc115c377526c27311d4d7ffd4e0d236a5af5016bd364ccaa11a4989d1401e8 gcc-12-20220924.tar.xz -41cbb4d69218006cf9e0cdb6c86212ef451f8decd52a50a7dbb4d34726009da7a4e0261c852b46cb584db253a4bae2f31dc485c506cb545e64a7d26e0ba6c2b6 0001-posix_memalign.patch -531155055cda7f119bcac6479bcae73af9201cd596af9cf1616850bbcf4393b91c5de9f2fbbc1cde6e158fb4df7237b033146f662dff5fa0ea12151cc514adb8 0002-gcc-poison-system-directories.patch -c1275d77b5269386a2ec683933570810f5a2ba1208c161ed887797eb9aee3cb82ef08a8964635902614e6a6e83f3065ba0801c9355d85dd8d60cb1fa20bdf687 0003-specs-turn-on-Wl-z-now-by-default.patch -a54e45bff4484a35d3826435a414d909281453f5605f4081cf3be1f15336cceed93a1d8a54e92e2fa97188623e3030ca1323d7749141e228a7db73795230d86a 0004-Turn-on-D_FORTIFY_SOURCE-2-by-default-for-C-C-ObjC-O.patch -ad132ddbd0c33a3983e3de4f74d8fdb8cb1ddf53ef54de0a5c12efb49e42014ed117165d43f396bcf3455ecfe2c8620e0326e73b4160a370a4cc92d213329c34 0005-On-linux-targets-pass-as-needed-by-default-to-the-li.patch -0b9ce0f130a7b797770f3d58a5200575f20e5663c86c0c5710718b7bffd3416cc2f05861613d9c258428e9541c6e0b9837d01f0c99d383e2c3de0503a988e861 0006-Enable-Wformat-and-Wformat-security-by-default.patch -e7813acc7ead61373c212cefbe53eb020b4c5bd8f0f35ee972e0524060713f911624f5a1a871feada642e1f3f5e48c8508125ca2da09de351d544bedf1d44ada 0007-Enable-Wtrampolines-by-default.patch -d0d0566a11e4828bdd6f53346a9a6b9841f3066d3f4a05ee2b6fe97aeb4552654170e7662318ea18fc777c3e75c88a067097478fc4e880a3f9c134b8a3af2277 0008-Disable-ssp-on-nostdlib-nodefaultlibs-and-ffreestand.patch -f75e63d9d933874f18fb7f55b135c60dfa0377abafa8e0edb91b85d5f00f4f072d0a338ba5d9baec18494211dbbda8068782830dbafbb37068936f76aede270f 0009-Ensure-that-msgfmt-doesn-t-encounter-problems-during.patch -afa4daba222a19569588736a8276dc7c12223a7c222f3dd3795dc3f1cd90f40b90518971ae27b358020354f89562c9680ec8b8e24e85e6d4f8e54e79d185359b 0010-Don-t-declare-asprintf-if-defined-as-a-macro.patch -79dac82249fb573ec477e1451a33883302eb63a5110853faed117f5021221f2153e2ec845dd5a0043b1bf9f0e5736ef0c89743ff2d771774a281c8b24542803a 0011-libiberty-copy-PIC-objects-during-build-process.patch -b035f85c1703b45d15c1d1ffe7d23400e01625e5d403504911cc92f740b02586447de2a9d66a9f80f12b9c227bc193e2a43942c8af2bdb42cdeff8272bbe6068 0012-libitm-disable-FORTIFY.patch -9fb4d396a9493d2d68fe829ce075ba4c5df148b1d6aaab315a6f8ccbdd70d0e052a5dc50369adc2dab005b4a3becd1504b182faed6e82c86accb95f5bc2b9f50 0013-libgcc_s.patch -f82ac22961d842c9f8e731a601bb255918cc160969888363ad2d83e2ccf08b19114a200d46bcf99d097bf530f470c2b1e71e46828bc1b9fff5469ff945f541d8 0014-nopie.patch -6527dc9d250db48d56cf01e9299461bf22a838ffda96c40d448e18f457b206cec2322275d2d5abbbaf3c6573c5e7eba12724c9691b601f118ff7520e19726373 0015-dlang-use-libucontext-on-mips64.patch -6c3ce0ccd68b19e2c76172d8f24b0747ee0af2b8de7af692f2f699848267d7fc42fec8e5c303102fe05be7e934b56f21eea17ce47c8aca20570590830d88e9b0 0016-ada-fix-shared-linking.patch -7089a96aaec8e0b222cb3fa7301d71bb2e328a24dec33e15ea9e3e7695bcae919308249b9a3be5ea2f3b1f069f9fd1739066f31d12317fcdab0596dba9ca54a4 0017-build-fix-CXXFLAGS_FOR_BUILD-passing.patch -b7ebdeee0b143052fdd6e3efa070ea8621d4fb729312cbc787d618e666b593990a20cd9044a786265970d8e09ec13da03b797009543d0b657b0fe924f2dcaa68 
0018-add-fortify-headers-paths.patch -8e682893d6367732ab8c490b915112a68d98855deec3bd8db91dc0d9bf486b8c044b13ee2b95c4806da7ac17c41034e081b7a66861018274cb33fdb2fd6df04a 0019-Alpine-musl-package-provides-libssp_nonshared.a.-We-.patch -a14c5f98ade5af8cd6e3a0244752674d9c4f6dadb4260f98f1949bff51ac1211a3f8319e0f933f776e98998d2c7221004f92413f97ccc2e966f8462ed6d33597 0020-DP-Use-push-state-pop-state-for-gold-as-well-when-li.patch -28c1d477da79aa212ac79e4b02cf865d8b9c31cec6c42f41b4268e3f3c49bf67fb51e54180abe543a54e550788bb472bfcf1b4bc38d072a792d7403dbbee178a 0021-mips64-disable-multilib-support.patch -0920e31c46bf937b47a0602766f042d45adb71abf332ee84399c665c12298ef115cff945fe26d646b0276bfdfdd04913970e6f1f8784a11c26e15111c854643d 0022-aarch64-disable-multilib-support.patch -e4c6bf7ec40f2798c8e5b40a543aecffd5591a2805546b3b97aaa4fbe4df6ce4330a60973a9ddfbca9890590606d5204e7f653ab2b6e4b2c13feeb595b68e63a 0023-s390x-disable-multilib-support.patch -0e956d793c94283ce5af7fe84bfcbb655585a9573608e9bf497fa7b726e12daa391e44977d0a8c97fb460aba89b1773b91e036b0ee1ef4d6263a3943cb63d9cd 0024-ppc64-le-disable-multilib-support.patch -95917fcb60dbc0a8134db9beb583f3c9ea61128499c214f594c434ec8246641ec41e245ce2d1d9b85ffd40ea0e5764f7a33c5522b2547145814245ac0fa25025 0025-x86_64-disable-multilib-support.patch -b40d7e4712c035674c993bbb55475290ec14523b3f0fd05493514bac4e9adaa6641faf815fc40ffc00119d9fd64be28218ee874c289ec7430eeef05ab2fcae5e 0026-riscv-disable-multilib-support.patch -674360ce2ee9f704d0632cc98756f9fe8dd8ca30064fb9d3423b437f7e679c1c51e765b15e535dcb278cd2769583690acb3395b91e4fd5f6f4e3b97879fcc313 0027-always-build-libgcc_eh.a.patch -f060687adcd5297124e4000f1ba1e3fd5d7d124da04d948cbd0d4a6c69a90a2b29a4a0dbbe13a83ab6950724f434de012b681bdbcdf53c0100b40fe3d00f2f2f 0028-ada-libgnarl-compatibility-for-musl.patch -5160bae68e20a1966c1f6d655ee98af759e9b9ee842718ae6007d467b418e1cf3b307528a0841477b5259671ce868521b06c0f2e947b7b8f3a398c53dd978252 0029-ada-musl-support-fixes.patch -3c04b26554a78096296ca9542c77a91219bd26044dd2cb2006db4c1944889a97c215900b3828ba7e8c675162406db543605a815bdfbd915bf810663b1b253bdd 0033-gcc-go-link-to-libucontext.patch -699dc3641099da6136dd3689f06c6553c03b3a85acf83a3fce1beb5425065b3e378535ca9e9100a120fdbafc34871d61c063fd5328a49cd87a15a989ed51706d 0034-Use-generic-errstr.go-implementation-on-musl.patch -d9ba710f770e053c8f212e821817c188091a829658050b9ab5906388553ec60fec37943ea43c270e92a9014902949f3c98fc4639032d92b8145b375bb29e193e 0035-configure-Add-enable-autolink-libatomic-use-in-LINK_.patch -ab90d8fdd977d6cd3da096a1c76d77be3e89a020b2127247771711a32eb608cceed21834ef488ab4b69bb0f408b098fdfb61630819e3d1a1e57d5af67800ee74 0036-configure-fix-detection-of-atomic-builtins-in-libato.patch -8bc6823f0b3c66f7b73d7ddb64ffa6930463285c2e9a14a2bc1882bcc4271144eaa1107d713294699caf9481648163cbf43921a2b8e4ac0d55c78a804bae8a3d 0037-libgo-Recognize-off64_t-and-loff_t-definitions-of-mu.patch -e9699f4721778869eb3a8fef2c679208ef5b98584892f30b0e1cb5dc1669f8158198d7792659b1b56c381baf62247d21990dcced9178547affd5d6bfb2d12548 0039-gcc-go-Use-int64-type-as-offset-argument-for-mmap.patch -d6dc1bfb881a313d167aaa5658790b0f55eea4336c408cfc6613dd5783440dafd0d37c43031a5f3e69be40f632e38371cd4fb6e5f0494ac4ea4d7d5025d2ae02 0041-go-gospec-forcibly-disable-fsplit-stack-support.patch -684c6a6d52512b973429b6e709966439ac1e174f9e79a33d4a638b452245b457b34752b4b4034ba983f6a712f86522e7adf715bab00a6603f64a12139c5b1e39 0042-gcc-go-fix-build-error-with-SYS_SECCOMP.patch 
-25014dfa99d96ee70ce0ad22e9f7974f0a51cc50b3b9c2db49df50774c8cd29e497ceed120486bee50be83bfb07f2009ed310eb9b0543f2795bd7359b87eadd2 0043-libstdc-do-not-throw-exceptions-for-non-C-locales-on.patch -75fd83ac05ab0a08d5f48547b08810f9934209bc78b5db59d65f33887b382af7ec24d8a29d40f86325c05af40c1ae1ec6466c839f646af90afc895a13073d07b 0044-gdc-unconditionally-link-libgphobos-against-libucont.patch -13e047153076d6e1fc40c9f5b6bfe5699c0e5460248f3d2b35ae36677cb960525af7b0b025997e5000a8492cec5e77a86828d66b4058c0d7f89fde0ab3890142 0045-druntime-link-against-libucontext-on-all-platforms.patch -c33ca2553642c2dbd1c65cd97046548f08775785a3db06d761e3bbe61398c37bc382fe132c0c3fa2101dfd4eea2a6d48bf4fae899a0ddb811c81abd7be35c122 0049-libgo-adjust-name-of-union-in-sigevent-struct.patch -179cd15d629884a66e954fd76066675efa594686b970facbb12ad50769e5d70b5530d7f61e77120e26d1c3dfc701cfc5295f341f635db998df73c41bc8e62172 0050-libphobos-don-t-define-__mode_t-twice-on-musl-target.patch -c82d7c8d340a76df3d796565a79b0ccc04ddffef39927620e1f3719bf2dc1db101ba13aef24b46c5bc95b7bf1e31c8bda4ab0936ba4c9c5e5047ba08826c982c 0051-libgo-Explicitly-define-SYS_timer_settime-for-32-bit.patch -eb403d8ea665fd5dc2c11faf43b055e6a3bf480a397ceee3e0ca1e38ec7d2392315f2694ed9a34ffbc99e464f2873fbbf91be8646ea4dea5d3636e3ea22fefa0 0052-libgnat-time_t-is-always-64-bit-on-musl-libc.patch -22fb6edf1ed0387e2b93839ffe6e82a7fee420950af90e91199c3488d966702fdeb1a3396d22be0c73a4051525da9349c93d070a0d83b724c83f2b268da6483f 0053-libgo-make-match.sh-POSIX-shell-compatible.patch -" diff --git a/test/testdata/aportgen/pmaports/cross/gcc-armhf/APKBUILD b/test/testdata/aportgen/pmaports/cross/gcc-armhf/APKBUILD deleted file mode 100644 index 8efc402e..00000000 --- a/test/testdata/aportgen/pmaports/cross/gcc-armhf/APKBUILD +++ /dev/null @@ -1,810 +0,0 @@ -# Automatically generated aport, do not edit! -# Generator: pmbootstrap aportgen gcc-armhf -# Based on: main/gcc (from Alpine) - -CTARGET_ARCH=armhf -CTARGET="$(arch_to_hostspec ${CTARGET_ARCH})" -LANG_D=false -LANG_OBJC=false -LANG_JAVA=false -LANG_GO=false -LANG_FORTRAN=false -LANG_ADA=false -options="!strip" - -# abuild doesn't try to tries to install "build-base-$CTARGET_ARCH" -# when this variable matches "no*" -BOOTSTRAP="nobuildbase" - -# abuild will only cross compile when this variable is set, but it -# needs to find a valid package database in there for dependency -# resolving, so we set it to /. 
-CBUILDROOT="/" - -_cross_configure="--disable-bootstrap --with-sysroot=/usr/$CTARGET" - -pkgname=gcc-armhf -_pkgbase=12.2.1 # must match gcc/BASE-VER -_pkgsnap=20220924 -pkgver=${_pkgbase}_git${_pkgsnap} -[ "$BOOTSTRAP" = "nolibc" ] && pkgname="gcc-pass2" -[ "$CBUILD" != "$CHOST" ] && _cross="-$CARCH" || _cross="" -[ "$CHOST" != "$CTARGET" ] && _target="-$CTARGET_ARCH" || _target="" - -pkgname=gcc-armhf -pkgrel=1 -pkgdesc="Stage2 cross-compiler for armhf" -url="https://gcc.gnu.org" -arch="x86_64" -license="GPL-2.0-or-later LGPL-2.1-or-later" -_gccrel=$pkgver-r$pkgrel -depends="binutils-armhf mpc1" -makedepends_build="gcc g++ bison flex texinfo gawk zip gmp-dev mpfr-dev mpc1-dev zlib-dev" -makedepends_host="linux-headers gmp-dev mpfr-dev mpc1-dev isl-dev zlib-dev musl-dev-armhf binutils-armhf" -subpackages="g++-armhf:gpp libstdc++-dev-armhf:libcxx_dev" -[ "$CHOST" = "$CTARGET" ] && subpackages="gcc-doc$_target" -replaces="libstdc++ binutils" - -: "${LANG_CXX:=true}" -: "${LANG_D:=true}" -: "${LANG_OBJC:=true}" -: "${LANG_GO:=true}" -: "${LANG_FORTRAN:=true}" -: "${LANG_ADA:=true}" -: "${LANG_JIT:=true}" - -_libgomp=true -_libgcc=false -_libatomic=true -_libitm=true - -if [ "$CHOST" != "$CTARGET" ]; then - if [ "$BOOTSTRAP" = nolibc ]; then - LANG_CXX=false - LANG_ADA=false - _libgcc=false - _builddir="$srcdir/build-cross-pass2" - else - _builddir="$srcdir/build-cross-final" - fi - LANG_OBJC=false - LANG_GO=false - LANG_FORTRAN=false - LANG_D=false - LANG_JIT=false - _libgomp=false - _libatomic=false - _libitm=false - - # reset target flags (should be set in crosscreate abuild) - # fixup flags. seems gcc treats CPPFLAGS as global without - # _FOR_xxx variants. wrap it in CFLAGS and CXXFLAGS. - export CFLAGS="$CPPFLAGS $CFLAGS" - export CXXFLAGS="$CPPFLAGS $CXXFLAGS" - unset CPPFLAGS - export CFLAGS_FOR_TARGET=" " - export CXXFLAGS_FOR_TARGET=" " - export LDFLAGS_FOR_TARGET=" " - - STRIP_FOR_TARGET="$CTARGET-strip" -elif [ "$CBUILD" != "$CHOST" ]; then - # fixup flags. seems gcc treats CPPFLAGS as global without - # _FOR_xxx variants. wrap it in CFLAGS and CXXFLAGS. - export CFLAGS="$CPPFLAGS $CFLAGS" - export CXXFLAGS="$CPPFLAGS $CXXFLAGS" - unset CPPFLAGS - - # reset flags and cc for build - export CC_FOR_BUILD="gcc" - export CXX_FOR_BUILD="g++" - export CFLAGS_FOR_BUILD=" " - export CXXFLAGS_FOR_BUILD=" " - export LDFLAGS_FOR_BUILD=" " - export CFLAGS_FOR_TARGET=" " - export CXXFLAGS_FOR_TARGET=" " - export LDFLAGS_FOR_TARGET=" " - - # Languages that do not need bootstrapping - LANG_OBJC=false - LANG_GO=false - LANG_FORTRAN=false - LANG_D=false - LANG_JIT=false - - STRIP_FOR_TARGET=${CROSS_COMPILE}strip - _builddir="$srcdir/build-cross-native" -else - STRIP_FOR_TARGET=${CROSS_COMPILE}strip - _builddir="$srcdir/build" -fi - -case "$CARCH" in -# GDC hasn't been ported to PowerPC -# See libphobos/configure.tgt in GCC sources for supported targets -# riscv fails with: error: static assert "unimplemented" -ppc64le|riscv64) LANG_D=false ;; -# GDC does currently not work on 32-bit musl architectures. -# This is a known upstream issue. -# See: https://github.com/dlang/druntime/pull/3383 -armhf|armv7|x86) LANG_D=false ;; -esac - -# libitm has TEXTRELs in ARM build, so disable for now -case "$CTARGET_ARCH" in -arm*) _libitm=false ;; -mips*) _libitm=false ;; -riscv64) _libitm=false ;; -esac - -# Internal libffi fails to build on MIPS at the moment, need to -# investigate further. We disable LANG_GO on mips64 as it requires -# the internal libffi. 
-case "$CTARGET_ARCH" in -mips*) LANG_GO=false ;; -esac - -# Fortran uses libquadmath if toolchain has __float128 -# currently on x86, x86_64 and ia64 -_libquadmath=$LANG_FORTRAN -case "$CTARGET_ARCH" in -x86 | x86_64) _libquadmath=$LANG_FORTRAN ;; -*) _libquadmath=false ;; -esac - -# libatomic is a dependency for openvswitch -$_libatomic && subpackages="$subpackages libatomic::$CTARGET_ARCH" -$_libgcc && subpackages="$subpackages libgcc::$CTARGET_ARCH" -$_libquadmath && subpackages="$subpackages libquadmath::$CTARGET_ARCH" -if $_libgomp; then - depends="$depends libgomp=$_gccrel" - subpackages="$subpackages libgomp::$CTARGET_ARCH" -fi - -case "$CARCH" in -riscv64) -LANG_ADA=false;; -esac - -_languages=c -if $LANG_CXX; then - _languages="$_languages,c++" -fi -if $LANG_D; then - subpackages="$subpackages libgphobos::$CTARGET_ARCH gcc-gdc$_target:gdc" - _languages="$_languages,d" - makedepends_build="$makedepends_build libucontext-dev gcc-gdc-bootstrap" -fi -if $LANG_OBJC; then - subpackages="$subpackages libobjc::$CTARGET_ARCH gcc-objc$_target:objc" - _languages="$_languages,objc" -fi -if $LANG_GO; then - subpackages="$subpackages libgo::$CTARGET_ARCH gcc-go$_target:go" - _languages="$_languages,go" -fi -if $LANG_FORTRAN; then - subpackages="$subpackages libgfortran::$CTARGET_ARCH gfortran$_target:gfortran" - _languages="$_languages,fortran" -fi -if $LANG_ADA; then - subpackages="$subpackages gcc-gnat$_target:gnat" - _languages="$_languages,ada" - if [ "$CBUILD" = "$CTARGET" ]; then - makedepends_build="$makedepends_build gcc-gnat-bootstrap" - subpackages="$subpackages libgnat-static:libgnatstatic:$CTARGET_ARCH libgnat::$CTARGET_ARCH" - else - subpackages="$subpackages libgnat::$CTARGET_ARCH" - makedepends_build="$makedepends_build gcc-gnat gcc-gnat$_cross" - fi -fi -if $LANG_JIT; then - subpackages="$subpackages libgccjit:jit libgccjit-dev:jitdev" -fi -makedepends="$makedepends_build $makedepends_host" - -# when using upstream releases, use this URI template -# https://gcc.gnu.org/pub/gcc/releases/gcc-${_pkgbase:-$pkgver}/gcc-${_pkgbase:-$pkgver}.tar.xz -# -# right now, we are using a git snapshot. snapshots are taken from gcc.gnu.org/pub/gcc/snapshots. -# However, since they are periodically deleted from the GCC mirrors the utilized snapshots are -# mirrored on dev.alpinelinux.org. Please ensure that the snapshot Git commit (as stated in the -# README) matches the base commit on the version-specific branch in the Git repository below. -# -# PLEASE submit all patches to gcc to https://gitlab.alpinelinux.org/kaniini/alpine-gcc-patches, -# so that they can be properly tracked and easily rebased if needed. 
-source="https://dev.alpinelinux.org/archive/gcc/${_pkgbase%%.*}-${_pkgsnap}/gcc-${_pkgbase%%.*}-${_pkgsnap}.tar.xz - 0001-posix_memalign.patch - 0002-gcc-poison-system-directories.patch - 0003-specs-turn-on-Wl-z-now-by-default.patch - 0004-Turn-on-D_FORTIFY_SOURCE-2-by-default-for-C-C-ObjC-O.patch - 0005-On-linux-targets-pass-as-needed-by-default-to-the-li.patch - 0006-Enable-Wformat-and-Wformat-security-by-default.patch - 0007-Enable-Wtrampolines-by-default.patch - 0008-Disable-ssp-on-nostdlib-nodefaultlibs-and-ffreestand.patch - 0009-Ensure-that-msgfmt-doesn-t-encounter-problems-during.patch - 0010-Don-t-declare-asprintf-if-defined-as-a-macro.patch - 0011-libiberty-copy-PIC-objects-during-build-process.patch - 0012-libitm-disable-FORTIFY.patch - 0013-libgcc_s.patch - 0014-nopie.patch - 0015-dlang-use-libucontext-on-mips64.patch - 0016-ada-fix-shared-linking.patch - 0017-build-fix-CXXFLAGS_FOR_BUILD-passing.patch - 0018-add-fortify-headers-paths.patch - 0019-Alpine-musl-package-provides-libssp_nonshared.a.-We-.patch - 0020-DP-Use-push-state-pop-state-for-gold-as-well-when-li.patch - 0021-mips64-disable-multilib-support.patch - 0022-aarch64-disable-multilib-support.patch - 0023-s390x-disable-multilib-support.patch - 0024-ppc64-le-disable-multilib-support.patch - 0025-x86_64-disable-multilib-support.patch - 0026-riscv-disable-multilib-support.patch - 0027-always-build-libgcc_eh.a.patch - 0028-ada-libgnarl-compatibility-for-musl.patch - 0029-ada-musl-support-fixes.patch - 0033-gcc-go-link-to-libucontext.patch - 0034-Use-generic-errstr.go-implementation-on-musl.patch - 0035-configure-Add-enable-autolink-libatomic-use-in-LINK_.patch - 0036-configure-fix-detection-of-atomic-builtins-in-libato.patch - 0037-libgo-Recognize-off64_t-and-loff_t-definitions-of-mu.patch - 0039-gcc-go-Use-int64-type-as-offset-argument-for-mmap.patch - 0041-go-gospec-forcibly-disable-fsplit-stack-support.patch - 0042-gcc-go-fix-build-error-with-SYS_SECCOMP.patch - 0043-libstdc-do-not-throw-exceptions-for-non-C-locales-on.patch - 0044-gdc-unconditionally-link-libgphobos-against-libucont.patch - 0045-druntime-link-against-libucontext-on-all-platforms.patch - 0049-libgo-adjust-name-of-union-in-sigevent-struct.patch - 0050-libphobos-don-t-define-__mode_t-twice-on-musl-target.patch - 0051-libgo-Explicitly-define-SYS_timer_settime-for-32-bit.patch - 0052-libgnat-time_t-is-always-64-bit-on-musl-libc.patch - 0053-libgo-make-match.sh-POSIX-shell-compatible.patch - " - -# we build out-of-tree -_gccdir="$srcdir"/gcc-${_pkgbase%%.*}-${_pkgsnap} -_gcclibdir="/usr/lib/gcc/$CTARGET/${_pkgbase:-$pkgver}" -_gcclibexec="/usr/libexec/gcc/$CTARGET/${_pkgbase:-$pkgver}" - -prepare() { - cd "$_gccdir" - - _err= - for i in $source; do - case "$i" in - *.patch) - msg "Applying $i" - patch -p1 -i "$srcdir"/$i || _err="$_err $i" - ;; - esac - done - - if [ -n "$_err" ]; then - error "The following patches failed:" - for i in $_err; do - echo " $i" - done - return 1 - fi - - echo ${_pkgbase:-$pkgver} > gcc/BASE-VER -} - -build() { - local _arch_configure= - local _libc_configure=