Compare commits

...

43 Commits

Author SHA1 Message Date
topjohnwu
64752f38e8 Do not unwrap when getting decoder and encoder
Or else things will crash mysteriously when unexpected input occurs
2025-12-05 03:40:18 -08:00
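A minimal sketch of the pattern this commit moves towards, with made-up placeholder types (`Format`, `get_decoder`) rather than Magisk's real decoder API: have the factory return a `Result` so callers can propagate failures instead of panicking on unrecognized input.

```rust
use std::io::{self, Read};

#[allow(dead_code)]
enum Format {
    Gzip,
    Xz,
    Unknown,
}

// Hypothetical factory: real code would wrap `src` in the matching
// decompressor; the point is returning io::Result instead of unwrapping.
fn get_decoder<R: Read + 'static>(format: Format, src: R) -> io::Result<Box<dyn Read>> {
    match format {
        Format::Gzip | Format::Xz => Ok(Box::new(src)),
        Format::Unknown => Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "unsupported compression format",
        )),
    }
}

fn main() -> io::Result<()> {
    let data: &[u8] = b"not really compressed";
    // `?` propagates the failure; an `.unwrap()` here would instead abort the
    // whole process whenever the input format is not recognized.
    let mut dec = get_decoder(Format::Unknown, data)?;
    let mut out = Vec::new();
    dec.read_to_end(&mut out)?;
    Ok(())
}
```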
topjohnwu
9ac4b5ce7d Add proper lzma format detection 2025-12-05 03:40:18 -08:00
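As a rough illustration of what format detection can look at (not the actual magiskboot logic): the xz container begins with a fixed six-byte magic, while a legacy .lzma stream has no magic at all and can only be guessed from its 13-byte header and properties byte.

```rust
const XZ_MAGIC: [u8; 6] = [0xFD, b'7', b'z', b'X', b'Z', 0x00];

#[derive(Debug, PartialEq)]
enum LzmaKind {
    Xz,        // .xz container (has a magic number)
    LzmaAlone, // legacy .lzma stream (no magic, 13-byte header)
    Unknown,
}

fn detect_lzma(buf: &[u8]) -> LzmaKind {
    if buf.len() >= XZ_MAGIC.len() && buf[..XZ_MAGIC.len()] == XZ_MAGIC {
        return LzmaKind::Xz;
    }
    // Legacy .lzma: byte 0 encodes lc/lp/pb and must decode to a value below
    // 9 * 5 * 5 = 225, followed by a 4-byte dictionary size and an 8-byte
    // uncompressed size. Purely a heuristic, since there is no real magic.
    if buf.len() >= 13 && buf[0] < 225 {
        return LzmaKind::LzmaAlone;
    }
    LzmaKind::Unknown
}

fn main() {
    assert_eq!(detect_lzma(b"\xFD7zXZ\x00\x00\x04"), LzmaKind::Xz);
    assert_eq!(detect_lzma(&[]), LzmaKind::Unknown);
}
```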
topjohnwu
505053f9b4 Properly support AVD with minor SDK version 2025-12-04 20:55:46 -08:00
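This commit concerns SDK levels that carry a minor component (see also "Support API level as floating point" further down). A minimal, hypothetical parser for such values might look like the sketch below; it is not the code Magisk actually uses.

```rust
// Accept an SDK level with an optional minor part, e.g. "36" or "36.1".
fn parse_sdk(ver: &str) -> Option<(u32, u32)> {
    let mut parts = ver.trim().splitn(2, '.');
    let major = parts.next()?.parse().ok()?;
    let minor = match parts.next() {
        Some(m) => m.parse().ok()?,
        None => 0,
    };
    Some((major, minor))
}

fn main() {
    assert_eq!(parse_sdk("36"), Some((36, 0)));
    assert_eq!(parse_sdk("36.1"), Some((36, 1)));
    assert_eq!(parse_sdk("oops"), None);
}
```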
topjohnwu
ccb264f33a Release Magisk v30.6
[skip ci]
2025-12-01 15:46:33 -08:00
topjohnwu
84f7d75d30 Update release.sh
Strip out all canary build logic
2025-12-01 15:27:01 -08:00
南宫雪珊
9a776c22d9 Revert "Use rootfs for magisktmp if possible" 2025-12-01 11:45:34 -08:00
topjohnwu
363566d0d5 Release Magisk v30.5
[skip ci]
2025-12-01 01:52:46 -08:00
topjohnwu
d9dc459bf4 Update system_properties
Fix #9408
2025-12-01 00:25:39 -08:00
topjohnwu
5d6b703622 Move occupy and unoccupy out of base crate 2025-11-29 00:13:41 -08:00
topjohnwu
f7ce9c38e1 Run through clippy and rustfmt 2025-11-29 00:13:41 -08:00
LoveSy
bdbfb40383 Use rootfs for magisktmp if possible 2025-11-29 00:13:41 -08:00
topjohnwu
283fc0f46f Update cargo dependencies 2025-11-28 21:00:48 -08:00
topjohnwu
2c24a41bf2 Update gradle dependencies 2025-11-27 03:36:49 -08:00
topjohnwu
97c93a1f4d Smaller release binary size 2025-11-27 02:00:54 -08:00
topjohnwu
8d534e6de8 Update cxx-rs 2025-11-25 02:29:45 -08:00
topjohnwu
3a60ef2039 Update to ONDK r29.3 2025-11-21 13:28:46 -08:00
Wang Han
52d7eff03f Fix splice direction for ptmx out stream 2025-11-19 15:14:59 -08:00
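For context on the fix above: splice(2) moves data from its first descriptor to its second, so when forwarding output from the pty master the ptmx fd must be the input side and the pipe toward the consumer the output side; swapping the two moves data the wrong way. The sketch below (using the libc crate) is illustrative only; the fd names and buffer size are assumptions, not Magisk's code.

```rust
use std::io;
use std::os::unix::io::RawFd;
use std::ptr;

// One splice(2) call: copy up to `len` bytes from `fd_in` to `fd_out`.
// splice requires at least one side of the transfer to be a pipe.
fn splice_once(fd_in: RawFd, fd_out: RawFd, len: usize) -> io::Result<usize> {
    // SAFETY: the caller guarantees both descriptors are valid and open.
    let n = unsafe { libc::splice(fd_in, ptr::null_mut(), fd_out, ptr::null_mut(), len, 0) };
    if n < 0 {
        Err(io::Error::last_os_error())
    } else {
        Ok(n as usize)
    }
}

// The pty master produces the data (input side), the pipe consumes it
// (output side); reversing the arguments is the kind of bug fixed above.
fn forward_ptmx(ptmx: RawFd, pipe_wr: RawFd) -> io::Result<usize> {
    splice_once(ptmx, pipe_wr, 0x10000)
}
```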
topjohnwu
020e23ea13 Disable help triggers on subcommands 2025-11-03 16:16:49 -08:00
topjohnwu
1599bfc2c5 Update dependencies 2025-11-02 13:52:32 -08:00
Wang Han
c8d51b38ba Enhance fdt_header validation for empty dtb 2025-11-02 02:42:48 -08:00
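A hedged sketch of the kind of check this commit tightens: validate the flattened device tree (FDT) header — magic number, minimum header size, and the advertised totalsize — before accepting a blob as a dtb, so that an empty or truncated dtb is rejected early. This is illustrative and not Magisk's actual implementation.

```rust
const FDT_MAGIC: u32 = 0xd00d_feed;
const FDT_HEADER_LEN: usize = 40; // size of struct fdt_header (10 big-endian u32 fields)

fn fdt_looks_valid(blob: &[u8]) -> bool {
    if blob.len() < FDT_HEADER_LEN {
        return false; // too small to even hold a header (e.g. an empty dtb)
    }
    let be_u32 = |off: usize| u32::from_be_bytes(blob[off..off + 4].try_into().unwrap());
    let magic = be_u32(0);
    let total_size = be_u32(4) as usize;
    // The header must fit inside the advertised size, and the advertised size
    // must fit inside the buffer we were handed.
    magic == FDT_MAGIC && total_size >= FDT_HEADER_LEN && total_size <= blob.len()
}

fn main() {
    assert!(!fdt_looks_valid(&[])); // an empty dtb is rejected
}
```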
Wang Han
f741a4aeb8 Free regex resources in plt_hook_commit
Free regex resources for registered and ignored hooks before clearing the lists.
2025-11-02 01:59:03 -08:00
topjohnwu
4ee2235961 Update dependencies 2025-10-20 10:30:09 -07:00
topjohnwu
536e50c6e0 Support optional trailing positional arguments 2025-10-19 17:15:30 -07:00
topjohnwu
57d9fc6099 Support short only options and switches 2025-10-19 17:15:30 -07:00
topjohnwu
52d8910bdd Cleanup code for EarlyExit during help triggers 2025-10-19 17:15:30 -07:00
topjohnwu
c94bd49a89 Update default help triggers 2025-10-19 17:15:30 -07:00
topjohnwu
b72ba6759e Vendor argh sources
Further customization will come in future commits
2025-10-19 17:15:30 -07:00
topjohnwu
5bcb55b7fc Format Rust imports with rustfmt 2025-10-19 17:15:30 -07:00
topjohnwu
0dc8231585 Make all dependencies workspace = true 2025-10-19 17:15:30 -07:00
Wang Han
470acc93c9 Remove clickable attribute from item_module_md2.xml 2025-10-19 14:02:02 -07:00
Wang Han
0edb80b10f Set module card as non clickable
It's too easy to mis-click the card.
2025-10-19 14:02:02 -07:00
topjohnwu
bcc6296d94 Build debug without debug-info 2025-10-03 00:16:17 -07:00
topjohnwu
c3db2e368d Release Magisk v30.4
[skip ci]
2025-10-02 04:30:47 -07:00
topjohnwu
d37da5ca66 Cleanup code 2025-10-02 04:18:20 -07:00
topjohnwu
aac52176ed Support API level as floating point 2025-10-02 04:10:22 -07:00
topjohnwu
78e2fc37e5 Add easy knobs to disable security checks 2025-10-02 04:09:46 -07:00
Wang Han
ca2e40593f Make fetchUpdate safe 2025-10-02 04:03:44 -07:00
LoveSy
c07fdc87e3 Handle second splice() failure gracefully 2025-10-02 04:03:27 -07:00
topjohnwu
7270f5e413 Several minor fixes/improvements 2025-10-02 04:03:08 -07:00
topjohnwu
07cc85ccb1 Default initialize before swap in move constructor
Fix #9373, fix #9384, fix #9400, fix #9404
2025-10-02 04:03:08 -07:00
topjohnwu
d6f17c42d5 Fix logging implementation error 2025-10-02 04:03:08 -07:00
Wang Han
d60806f429 Only reset NB prop when zygisk is enabled 2025-10-02 03:19:32 -07:00
Mohammad Hasan Keramat J
8836a09c8c core: Update Persian translation 2025-09-30 00:21:44 -07:00
74 changed files with 3685 additions and 666 deletions


@@ -73,7 +73,8 @@ class InstallViewModel(svc: NetworkService, markwon: Markwon) : BaseViewModel()
val noteText = when {
noteFile.exists() -> noteFile.readText()
else -> {
val note = svc.fetchUpdate(APP_VERSION_CODE).note
val note = svc.fetchUpdate(APP_VERSION_CODE)?.note.orEmpty()
if (note.isEmpty()) return@launch
noteFile.writeText(note)
note
}


@@ -27,10 +27,8 @@
isEnabled="@{!item.removed && item.enabled && !item.showNotice}"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:clickable="@{!item.removed && item.enabled && !item.showNotice}"
android:focusable="@{!item.removed && item.enabled && !item.showNotice}"
android:nextFocusRight="@id/module_indicator"
android:onClick="@{() -> item.setEnabled(!item.enabled)}"
app:cardBackgroundColor="@color/color_card_background_color_selector"
tools:isEnabled="false"
tools:layout_gravity="center"


@@ -55,7 +55,7 @@ fun Project.setupCommon() {
compileSdkVersion(36)
buildToolsVersion = "36.0.0"
ndkPath = "$sdkDirectory/ndk/magisk"
ndkVersion = "29.0.13846066"
ndkVersion = "29.0.14206865"
defaultConfig {
minSdk = 23


@@ -19,7 +19,7 @@ abstract class SuLogDatabase : RoomDatabase() {
companion object {
val MIGRATION_1_2 = object : Migration(1, 2) {
override fun migrate(database: SupportSQLiteDatabase) = with(database) {
override fun migrate(db: SupportSQLiteDatabase) = with(db) {
execSQL("ALTER TABLE logs ADD COLUMN target INTEGER NOT NULL DEFAULT -1")
execSQL("ALTER TABLE logs ADD COLUMN context TEXT NOT NULL DEFAULT ''")
execSQL("ALTER TABLE logs ADD COLUMN gids TEXT NOT NULL DEFAULT ''")


@@ -44,7 +44,7 @@ object ServiceLocator {
private fun createSuLogDatabase(context: Context) =
Room.databaseBuilder(context, SuLogDatabase::class.java, "sulogs.db")
.addMigrations(SuLogDatabase.MIGRATION_1_2)
.fallbackToDestructiveMigration()
.fallbackToDestructiveMigration(true)
.build()
private fun createMarkwon(context: Context) =


@@ -34,7 +34,7 @@ data class ModuleJson(
@JsonClass(generateAdapter = true)
data class ReleaseAssets(
val name: String,
@Json(name = "browser_download_url") val url: String,
@param:Json(name = "browser_download_url") val url: String,
)
class DateTimeAdapter {
@@ -51,12 +51,12 @@ class DateTimeAdapter {
@JsonClass(generateAdapter = true)
data class Release(
@Json(name = "tag_name") val tag: String,
@param:Json(name = "tag_name") val tag: String,
val name: String,
val prerelease: Boolean,
val assets: List<ReleaseAssets>,
val body: String,
@Json(name = "created_at") val createdTime: Instant,
@param:Json(name = "created_at") val createdTime: Instant,
) {
val versionCode: Int get() {
return if (tag[0] == 'v') {


@@ -41,7 +41,9 @@ class NetworkService(
info
}
suspend fun fetchUpdate(version: Int) = findRelease { it.versionCode == version }.asInfo()
suspend fun fetchUpdate(version: Int) = safe {
findRelease { it.versionCode == version }.asInfo()
}
// Keep going through all release pages until we find a match
private suspend inline fun findRelease(predicate: (Release) -> Boolean): Release? {


@@ -8,6 +8,7 @@
<string name="install">نصب</string>
<string name="section_home">خانه</string>
<string name="section_theme">تم ها</string>
<string name="denylist">لیست منع</string>
<!--Home-->
<string name="no_connection">هیچ اتصالی وجود ندارد</string>
@@ -17,7 +18,9 @@
<string name="not_available">غیر/قابل دسترسی</string>
<string name="hide">پنهان کردن</string>
<string name="home_package">پکیج</string>
<string name="home_app_title">برنامه</string>
<string name="home_notice_content">Magisk را فقط از صفحه رسمی GitHub دانلود کنید. فایل‌ها از منابع ناشناس می‌توانند مخرب باشند!</string>
<string name="home_follow_title">ما را دنبال کنید</string>
<string name="home_support_title">حمایت ما</string>
<string name="home_item_source">منبع</string>
<string name="home_support_content">این برنامه (Magisk) رایگان و متن باز است و همیشه خواهد ماند. اگرچه شما میتواند با دونیت خود نشان دهد که به ما اهمیت می دهید.</string>
@@ -47,8 +50,10 @@
<!--Superuser-->
<string name="su_request_title">درخواست کاربر روت</string>
<string name="touch_filtered_warning">به دلیل اینکه یک برنامه در حال پوشاندن درخواست Superuser است، Magisk نمی‌تواند پاسخ شما را تأیید کند.</string>
<string name="deny">رد کردن</string>
<string name="prompt">درخواست کردن</string>
<string name="restrict">محدود کردن</string>
<string name="grant">اجازه دادن</string>
<string name="su_warning">دسترسی کامل به دستگاه شما را اعطا می کند. \nاگر مطمئن نیستید رد کنید!</string>
<string name="forever">همیشه</string>
@@ -67,37 +72,50 @@
<string name="su_snack_log_off">ورود به سیستم از %1$s غییر فعال است</string>
<string name="su_revoke_title">باطل بشه؟</string>
<string name="su_revoke_msg">تایید کنید که %1$s باطل بشه؟</string>
<string name="toast">پیام کوتاه</string>
<string name="none">هیچ کدام</string>
<string name="superuser_toggle_notification">اعلان ها</string>
<string name="superuser_toggle_revoke">ابطال</string>
<string name="superuser_policy_none">هنوز هیچ برنامه ای مجوز روت درخواست نکرده است.</string>
<!--Logs-->
<string name="log_data_none">شما هیچ لاگی ندارید, سعی کنید برنامه های که به روت دسترسی میگیرند را استفاده کنید.</string>
<string name="log_data_magisk_none">لاگ های مربوط به Magisk خالی است. پنا بر خدا.</string>
<string name="log_data_magisk_none">لاگ های مربوط به Magisk خالی است.</string>
<string name="menuSaveLog">ذخیره کردن لاگ</string>
<string name="menuClearLog">پاک کردن لاگ</string>
<string name="logs_cleared">لاگ با موفقیت پاک شد.</string>
<!--SafetyNet-->
<string name="pid">شناسه پردازش: %1$d</string>
<string name="target_uid">شناسه کاربر هدف: %1$d</string>
<string name="target_pid">شناسه پردازش هدف: %s</string>
<string name="selinux_context">متن SELinux: %s</string>
<string name="supp_group">گروه تکمیلی: %s</string>
<!-- MagiskHide -->
<string name="show_system_app">نشان دادن برنامه های سیستمی</string>
<string name="show_os_app">نمایش برنامه‌های سیستم عامل</string>
<string name="hide_filter_hint">فیلتر کردن با نام</string>
<string name="hide_search">سرچ کردن</string>
<!--Module -->
<string name="no_info_provided">(هیچ اطلاعاتی ارائه نشده است)</string>
<string name="reboot_userspace">راه اندازی مججد</string>
<string name="reboot_recovery">راه اندازی مججد برای رفتن به ریکاوری</string>
<string name="reboot_bootloader">راه اندازی مججد برای رفتن به بوت لودر</string>
<string name="reboot_download">راه اندازی مججد برای دانلود کردن</string>
<string name="reboot_edl">راه اندازی مججد برای رفتن به EDL</string>
<string name="reboot_safe_mode">راه اندازی مججد برای رفتن به حالت امن</string>
<string name="module_version_author">%1$s با %2$s</string>
<string name="module_state_remove">حذف کردن</string>
<string name="module_state_restore">بازگرداندن</string>
<string name="module_action_install_external">نصب از حافظه</string>
<string name="update_available">بروزرسانی در دسترس است</string>
<string name="suspend_text_riru">ماژول به دلیل فعال بودن %1$s متوقف شد</string>
<string name="suspend_text_zygisk">ماژول به دلیل غیرفعال بودن %1$s متوقف شد</string>
<string name="zygisk_module_unloaded">ماژول Zygisk به دلیل ناسازگاری بارگذاری نشد</string>
<string name="module_empty">هیچ ماژولی نصب نشده است</string>
<string name="confirm_install">نصب ماژول %1$s؟</string>
<string name="confirm_install_title">تأیید نصب</string>
<!--Settings -->
<string name="settings_dark_mode_title">حالت تم</string>
@@ -107,6 +125,10 @@
<string name="settings_dark_mode_dark">همیشه تاریک</string>
<string name="settings_download_path_title">مسیر دانلود</string>
<string name="settings_download_path_message">فایل ها در %1$s ذخیره خواهند شد.</string>
<string name="settings_hide_app_title">مخفی کردن برنامه Magisk</string>
<string name="settings_hide_app_summary">نصب یک برنامه پروکسی با شناسه بسته تصادفی و برچسب سفارشی</string>
<string name="settings_restore_app_title">بازگردانی برنامه Magisk</string>
<string name="settings_restore_app_summary">آشکار کردن برنامه و بازگرداندن APK اصلی</string>
<string name="language">زبان</string>
<string name="system_default">(پیش فرض سیستم)</string>
<string name="settings_check_update_title">چک کردن بروز رسانی ها</string>
@@ -114,8 +136,14 @@
<string name="settings_update_channel_title">کانال بروزرسانی</string>
<string name="settings_update_stable">پایدار</string>
<string name="settings_update_beta">آزمایشی</string>
<string name="settings_update_debug">اشکال‌زدایی</string>
<string name="settings_update_custom">شخصی سازی شده</string>
<string name="settings_update_custom_msg">اضافه کردن یک URL سفارشی</string>
<string name="settings_zygisk_summary">اجرای بخش‌هایی از Magisk در سرویس Zygote</string>
<string name="settings_denylist_title">اعمال لیست منع</string>
<string name="settings_denylist_summary">فرآیندهای موجود در لیست منع تمام تغییرات Magisk را از دست خواهند داد</string>
<string name="settings_denylist_config_title">پیکربندی لیست منع</string>
<string name="settings_denylist_config_summary">انتخاب فرآیندهایی که باید در لیست منع قرار گیرند</string>
<string name="settings_hosts_title">نصب بدون حذف یا تغییر در فایل ها</string>
<string name="settings_hosts_summary">نصب بدون حذف یا تغییر در فایل ها رای ساپورت از برنامه های Adblock</string>
<string name="settings_hosts_toast">ماژول نصب بدون حذف یا تغییر در فایل ها اضافه شد</string>
@@ -138,9 +166,19 @@
<string name="superuser_notification">اعلان روت</string>
<string name="settings_su_reauth_title">احراز هویت دوباره پس از بروز رسانی</string>
<string name="settings_su_reauth_summary">تأیید کردندوباره مجوزهای روت پس از ارتقاء برنامه</string>
<string name="settings_su_tapjack_title">محافظت در برابر Tapjacking</string>
<string name="settings_su_tapjack_summary">پنجره درخواست Superuser زمانی که توسط پنجره یا لایه دیگری پوشانده شود، به ورودی پاسخ نخواهد داد</string>
<string name="settings_su_auth_title">احراز هویت کاربر</string>
<string name="settings_su_auth_summary">درخواست احراز هویت کاربر هنگام درخواست Superuser</string>
<string name="settings_su_auth_insecure">هیچ روش احراز هویتی روی دستگاه پیکربندی نشده است</string>
<string name="settings_su_restrict_title">محدود کردن دسترسی روت</string>
<string name="settings_su_restrict_summary">به طور پیش‌فرض برنامه‌های Superuser جدید را محدود می‌کند. هشدار: این کار بیشتر برنامه‌ها را از کار می‌اندازد. فقط اگر دقیقاً می‌دانید چه می‌کنید آن را فعال کنید.</string>
<string name="settings_customization">سفارشی سازی</string>
<string name="setting_add_shortcut_summary">اضافه کردن یک میانبر زیبا را در صفحه اصلی در صورت شناسایی نام و نماد پس از پنهان کردن برنامه</string>
<string name="settings_doh_title">DNS روی HTTPS</string>
<string name="settings_doh_description">دور زدن مسمومیت DNS در برخی کشورها</string>
<string name="settings_random_name_title">تغییر تصادفی نام خروجی</string>
<string name="settings_random_name_description">تغییر تصادفی نام فایل خروجی تصاویر و فایل‌های tar پچ‌شده برای جلوگیری از شناسایی</string>
<string name="multiuser_mode">حالت چند کاربره</string>
<string name="settings_owner_only">فقط صاحب دستگاه</string>
<string name="settings_owner_manage">صاحب دستگاه مدیریت شود</string>
@@ -148,7 +186,6 @@
<string name="owner_only_summary">فقط مالک دسترسی روت دارد</string>
<string name="owner_manage_summary">فقط مالک می تواند دسترسی روت را مدیریت کرده و درخواست های پرامپت را دریافت کند</string>
<string name="user_independent_summary">هر کاربر قوانین روت جداگانه خود را دارد</string>
<string name="mount_namespace_mode">نصب کردن Namespace Mode</string>
<string name="settings_ns_global">سراسری Namespace</string>
<string name="settings_ns_requester">وراثتی Namespace</string>
@@ -160,19 +197,27 @@
<!--Notifications-->
<string name="update_channel">Magisk بروزرسانی های</string>
<string name="progress_channel">اعلان پیشرفت</string>
<string name="updated_channel">به‌روزرسانی کامل شد</string>
<string name="download_complete">دانلود کامل شد</string>
<string name="download_file_error">خطا در دانلود فایل</string>
<string name="magisk_update_title">بروزرسانی Magisk در دسترس است!</string>
<string name="updated_title">Magisk به‌روزرسانی شد</string>
<string name="updated_text">برای باز کردن برنامه لمس کنید</string>
<!--Toasts, Dialogs-->
<string name="yes">بله</string>
<string name="no">نه</string>
<string name="download">انلود کردن</string>
<string name="repo_install_title">نصب %1$s %2$s(%3$d)</string>
<string name="download">دانلود کردن</string>
<string name="reboot">راه اندازی مجدد</string>
<string name="close">بستن</string>
<string name="release_notes">نکته های نسخه</string>
<string name="flashing">ر حال فلش کردن…</string>
<string name="running">در حال اجرا…</string>
<string name="done">تمام!</string>
<string name="done_action">انجام عملیات %1$s به پایان رسید</string>
<string name="failure">ناموفق</string>
<string name="hide_app_title">در حال مخفی کردن برنامه Magisk…</string>
<string name="open_link_failed_toast">هیچ برنامه ای برای باز کردن لینک یافت نشد</string>
<string name="complete_uninstall">کامل کردن حذف</string>
<string name="restore_img">بازیابی تصاویر</string>
@@ -181,9 +226,24 @@
<string name="restore_fail">نسخه پشتیبان استک موجود نیست!</string>
<string name="setup_fail">نصب انجام نشد</string>
<string name="env_fix_title">به تنظیمات اضافی نیاز دارد</string>
<string name="env_fix_msg">دستگاه شما به پیکربندی اضافی نیاز دارد تا Magisk به درستی کار کند. آیا می‌خواهید ادامه دهید و راه‌اندازی مجدد انجام شود؟</string>
<string name="env_full_fix_msg">دستگاه شما نیاز به نصب دوباره Magisk دارد تا به درستی کار کند. لطفاً Magisk را از داخل برنامه دوباره نصب کنید، حالت Recovery نمی‌تواند اطلاعات دستگاه را به درستی بگیرد.</string>
<string name="setup_msg">راه اندازی محیط نصب…</string>
<string name="unsupport_magisk_title">نسخه پشتیبانی نشده Magisk</string>
<string name="unsupport_magisk_msg">این نسخه از برنامه از نسخه‌های Magisk پایین‌تر از %1$s پشتیبانی نمی‌کند.\n\nبرنامه طوری رفتار می‌کند که انگار Magisk نصب نشده است. لطفاً هرچه سریع‌تر Magisk را به‌روزرسانی کنید.</string>
<string name="unsupport_general_title">وضعیت غیرعادی</string>
<string name="unsupport_system_app_msg">اجرای این برنامه به عنوان برنامه سیستمی پشتیبانی نمی‌شود. لطفاً آن را به برنامه کاربری بازگردانید.</string>
<string name="unsupport_other_su_msg">یک باینری "su" غیر از Magisk شناسایی شد. لطفاً هر راهکار روت دیگری را حذف کنید و/یا Magisk را دوباره نصب کنید.</string>
<string name="unsupport_external_storage_msg">Magisk روی حافظه خارجی نصب شده است. لطفاً برنامه را به حافظه داخلی منتقل کنید.</string>
<string name="unsupport_nonroot_stub_msg">برنامه مخفی Magisk نمی‌تواند ادامه دهد زیرا دسترسی روت از بین رفته است. لطفاً APK اصلی را بازگردانید.</string>
<string name="unsupport_nonroot_stub_title">@string/settings_restore_app_title</string>
<string name="external_rw_permission_denied">برای فعال کردن این قابلیت ، اجازه دسترسی به حافظه بدهید</string>
<string name="post_notifications_denied">برای فعال‌سازی این قابلیت، مجوز اعلان‌ها را بدهید</string>
<string name="install_unknown_denied">برای فعال‌سازی این قابلیت، «نصب برنامه‌های ناشناخته» را مجاز کنید</string>
<string name="add_shortcut_title">اضافه کردن میانبر را به صفحه</string>
<string name="add_shortcut_msg">بعد از مخفی کردن این برنامه، ممکن است نام و آیکون آن سخت قابل شناسایی شود. آیا می‌خواهید یک میانبر زیبا به صفحه اصلی اضافه کنید؟</string>
<string name="app_not_found">هیچ برنامه‌ای برای انجام این عملیات یافت نشد</string>
<string name="reboot_apply_change">برای اعمال تغییرات، دستگاه را دوباره راه‌اندازی کنید</string>
<string name="restore_app_confirmation">این کار برنامه مخفی شده را به نسخه اصلی بازمی‌گرداند. آیا واقعاً می‌خواهید این کار را انجام دهید؟</string>
</resources>


@@ -30,4 +30,4 @@ android.nonFinalResIds=false
# Magisk
magisk.stubVersion=40
magisk.versionCode=30300
magisk.versionCode=30600


@@ -1,16 +1,16 @@
[versions]
kotlin = "2.2.20"
android = "8.13.0"
ksp = "2.2.20-2.0.2"
kotlin = "2.2.21"
android = "8.13.1"
ksp = "2.3.3"
rikka = "1.3.0"
navigation = "2.9.4"
navigation = "2.9.6"
libsu = "6.0.0"
okhttp = "5.1.0"
okhttp = "5.3.2"
retrofit = "3.0.0"
room = "2.8.0"
room = "2.8.4"
[libraries]
bcpkix = { module = "org.bouncycastle:bcpkix-jdk18on", version = "1.82" }
bcpkix = { module = "org.bouncycastle:bcpkix-jdk18on", version = "1.83" }
commons-compress = { module = "org.apache.commons:commons-compress", version = "1.28.0" }
retrofit = { module = "com.squareup.retrofit2:retrofit", version.ref = "retrofit" }
retrofit-moshi = { module = "com.squareup.retrofit2:converter-moshi", version.ref = "retrofit" }
@@ -23,10 +23,10 @@ timber = { module = "com.jakewharton.timber:timber", version = "5.0.1" }
jgit = { module = "org.eclipse.jgit:org.eclipse.jgit", version = "7.1.0.202411261347-r" }
# AndroidX
activity = { module = "androidx.activity:activity", version = "1.11.0" }
activity = { module = "androidx.activity:activity", version = "1.12.0" }
appcompat = { module = "androidx.appcompat:appcompat", version = "1.7.1" }
core-ktx = { module = "androidx.core:core-ktx", version = "1.17.0" }
core-splashscreen = { module = "androidx.core:core-splashscreen", version = "1.0.1" }
core-splashscreen = { module = "androidx.core:core-splashscreen", version = "1.2.0" }
constraintlayout = { module = "androidx.constraintlayout:constraintlayout", version = "2.2.1" }
fragment-ktx = { module = "androidx.fragment:fragment-ktx", version = "1.8.9" }
navigation-fragment-ktx = { module = "androidx.navigation:navigation-fragment-ktx", version.ref = "navigation" }
@@ -62,6 +62,6 @@ android-gradle-plugin = { module = "com.android.tools.build:gradle", version.ref
ksp-plugin = { module = "com.google.devtools.ksp:com.google.devtools.ksp.gradle.plugin", version.ref = "ksp" }
navigation-safe-args-plugin = { module = "androidx.navigation:navigation-safe-args-gradle-plugin", version.ref = "navigation" }
lsparanoid-plugin = { module = "org.lsposed.lsparanoid:gradle-plugin", version = "0.6.0" }
moshi-plugin = { module = "dev.zacsweers.moshix:dev.zacsweers.moshix.gradle.plugin", version = "0.32.0" }
moshi-plugin = { module = "dev.zacsweers.moshix:dev.zacsweers.moshix.gradle.plugin", version = "0.34.1" }
[plugins]


@@ -80,7 +80,7 @@ support_targets = {"magisk", "magiskinit", "magiskboot", "magiskpolicy", "resetp
default_targets = support_targets - {"resetprop"}
rust_targets = default_targets.copy()
clean_targets = {"native", "cpp", "rust", "app"}
ondk_version = "r29.2"
ondk_version = "r29.3"
# Global vars
config = {}


@@ -1,5 +1,18 @@
# Magisk Changelog
### v30.6 (2025.12.1)
- [MagiskInit] Revert a change that could result in bootloops
### v30.5 (2025.12.1)
- [General] Improve commandline argument parsing logic
- [resetprop] Properly support Android versions with property overrides
### v30.4 (2025.10.2)
- [MagiskSU] Fix several implementation bugs
### v30.3 (2025.9.29)
- [General] Support installing Magisk into vendor_boot partition


@@ -10,3 +10,6 @@ target-dir = "../out/rust"
build-std = ["std", "panic_abort"]
build-std-features = ["panic_immediate_abort", "optimize_for_size"]
profile-rustflags = true
[profile.release]
rustflags = ["-Z", "location-detail=none", "-Z", "fmt-debug=none"]

native/src/Cargo.lock (generated, 413 changed lines)

@@ -10,38 +10,9 @@ checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "anstyle"
version = "1.0.11"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
[[package]]
name = "argh"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ff18325c8a36b82f992e533ece1ec9f9a9db446bd1c14d4f936bac88fcd240"
dependencies = [
"argh_derive",
"argh_shared",
"rust-fuzzy-search",
]
[[package]]
name = "argh_derive"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb7b2b83a50d329d5d8ccc620f5c7064028828538bdf5646acd60dc1f767803"
dependencies = [
"argh_shared",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "argh_shared"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a464143cc82dedcdc3928737445362466b7674b5db4e2eb8e869846d6d84f4f6"
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
[[package]]
name = "autocfg"
@@ -53,13 +24,13 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
name = "base"
version = "0.0.0"
dependencies = [
"argh",
"bitflags",
"bytemuck",
"cfg-if",
"const_format",
"cxx",
"cxx-gen",
"derive",
"libc",
"nix",
"num-derive",
@@ -96,9 +67,9 @@ checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
[[package]]
name = "bitflags"
version = "2.9.4"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
[[package]]
name = "block-buffer"
@@ -111,9 +82,9 @@ dependencies = [
[[package]]
name = "block-buffer"
version = "0.11.0-rc.5"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9ef36a6fcdb072aa548f3da057640ec10859eb4e91ddf526ee648d50c76a949"
checksum = "96eb4cdd6cf1b31d671e9efe75c5d1ec614776856cefbe109ca373554a6d514f"
dependencies = [
"hybrid-array",
]
@@ -126,18 +97,18 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
[[package]]
name = "bytemuck"
version = "1.23.2"
version = "1.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677"
checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4"
dependencies = [
"bytemuck_derive",
]
[[package]]
name = "bytemuck_derive"
version = "1.10.1"
version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f154e572231cb6ba2bd1176980827e3d5dc04cc183a75dea38109fbdd672d29"
checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff"
dependencies = [
"proc-macro2",
"quote",
@@ -152,18 +123,18 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bzip2"
version = "0.6.0"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bea8dcd42434048e4f7a304411d9273a411f647446c1234a65ce0554923f4cff"
checksum = "f3a53fac24f34a81bc9954b5d6cfce0c21e18ec6959f44f56e8e90e4bb7c346c"
dependencies = [
"libbz2-rs-sys",
]
[[package]]
name = "cc"
version = "1.2.39"
version = "1.2.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1354349954c6fc9cb0deab020f27f783cf0b604e8bb754dc4658ecf0d29c35f"
checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a"
dependencies = [
"find-msvc-tools",
"shlex",
@@ -171,9 +142,9 @@ dependencies = [
[[package]]
name = "cfg-if"
version = "1.0.3"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "cfg_aliases"
@@ -183,18 +154,18 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "clap"
version = "4.5.48"
version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae"
checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
version = "4.5.48"
version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9"
checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
dependencies = [
"anstyle",
"clap_lex",
@@ -203,15 +174,15 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.7.5"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
[[package]]
name = "codespan-reporting"
version = "0.12.0"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81"
checksum = "af491d569909a7e4dee0ad7db7f5341fef5c614d5b8ec8cf765732aba3cff681"
dependencies = [
"serde",
"termcolor",
@@ -226,9 +197,9 @@ checksum = "0dabb6555f92fb9ee4140454eb5dcd14c7960e1225c6d1a6cc361f032947713e"
[[package]]
name = "const_format"
version = "0.2.34"
version = "0.2.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd"
checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad"
dependencies = [
"const_format_proc_macros",
]
@@ -255,9 +226,9 @@ dependencies = [
[[package]]
name = "crc"
version = "3.3.0"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
dependencies = [
"crc-catalog",
]
@@ -279,9 +250,9 @@ dependencies = [
[[package]]
name = "crypto-bigint"
version = "0.7.0-rc.7"
version = "0.7.0-rc.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0bfcfbe68dec4e49b4e93da8f091ce39556549554905fcb07308f6eeefae46c"
checksum = "6715836b4946e8585016e80b79c7561476aff3b22f7b756778e7b109d86086c6"
dependencies = [
"hybrid-array",
"num-traits",
@@ -293,9 +264,9 @@ dependencies = [
[[package]]
name = "crypto-common"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
"typenum",
@@ -303,18 +274,18 @@ dependencies = [
[[package]]
name = "crypto-common"
version = "0.2.0-rc.4"
version = "0.2.0-rc.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8235645834fbc6832939736ce2f2d08192652269e11010a6240f61b908a1c6"
checksum = "919bd05924682a5480aec713596b9e2aabed3a0a6022fab6847f85a99e5f190a"
dependencies = [
"hybrid-array",
]
[[package]]
name = "crypto-primes"
version = "0.7.0-pre.3"
version = "0.7.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25f2523fbb68811c8710829417ad488086720a6349e337c38d12fa81e09e50bf"
checksum = "fdd9b2855017318a49714c07ee8895b89d3510d54fa6d86be5835de74c389609"
dependencies = [
"crypto-bigint",
"libm",
@@ -323,18 +294,32 @@ dependencies = [
[[package]]
name = "cxx"
version = "1.0.170"
version = "1.0.189"
dependencies = [
"cc",
"cxx-build",
"cxxbridge-cmd",
"cxxbridge-flags",
"cxxbridge-macro",
"foldhash",
]
[[package]]
name = "cxx-build"
version = "1.0.189"
dependencies = [
"cc",
"codespan-reporting",
"indexmap",
"proc-macro2",
"quote",
"scratch",
"syn",
]
[[package]]
name = "cxx-gen"
version = "0.7.170"
version = "0.7.189"
dependencies = [
"codespan-reporting",
"indexmap",
@@ -345,7 +330,7 @@ dependencies = [
[[package]]
name = "cxxbridge-cmd"
version = "1.0.170"
version = "1.0.189"
dependencies = [
"clap",
"codespan-reporting",
@@ -357,24 +342,23 @@ dependencies = [
[[package]]
name = "cxxbridge-flags"
version = "1.0.170"
version = "1.0.189"
[[package]]
name = "cxxbridge-macro"
version = "1.0.170"
version = "1.0.189"
dependencies = [
"indexmap",
"proc-macro2",
"quote",
"rustversion",
"syn",
]
[[package]]
name = "der"
version = "0.8.0-rc.9"
version = "0.8.0-rc.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9d8dd2f26c86b27a2a8ea2767ec7f9df7a89516e4794e54ac01ee618dda3aa4"
checksum = "02c1d73e9668ea6b6a28172aa55f3ebec38507131ce179051c8033b5c6037653"
dependencies = [
"const-oid",
"der_derive",
@@ -410,29 +394,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer 0.10.4",
"crypto-common 0.1.6",
"crypto-common 0.1.7",
]
[[package]]
name = "digest"
version = "0.11.0-rc.2"
version = "0.11.0-rc.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6749b668519cd7149ee3d11286a442a8a8bdc3a9d529605f579777bfccc5a4bc"
checksum = "ea390c940e465846d64775e55e3115d5dc934acb953de6f6e6360bc232fe2bf7"
dependencies = [
"block-buffer 0.11.0-rc.5",
"block-buffer 0.11.0",
"const-oid",
"crypto-common 0.2.0-rc.4",
"crypto-common 0.2.0-rc.5",
"subtle",
]
[[package]]
name = "ecdsa"
version = "0.17.0-rc.7"
version = "0.17.0-rc.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ab355ec063f7a110eb627471058093aba00eb7f4e70afbd15e696b79d1077b"
checksum = "e914ecb8e11a02f42cc05f6b43675d1e5aa4d446cd207f9f818903a1ab34f19f"
dependencies = [
"der",
"digest 0.11.0-rc.2",
"digest 0.11.0-rc.4",
"elliptic-curve",
"rfc6979",
"signature",
@@ -442,19 +426,21 @@ dependencies = [
[[package]]
name = "elliptic-curve"
version = "0.14.0-rc.14"
version = "0.14.0-rc.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ae7ba52b8bca06caab3e74b7cf8858a2934e6e75d80b03dbe48d2d394a4489c"
checksum = "39ecd2903524729de5d0cba7589121744513feadd56d71980cb480c48caceb11"
dependencies = [
"base16ct",
"crypto-bigint",
"digest 0.11.0-rc.2",
"ff",
"group",
"digest 0.11.0-rc.4",
"getrandom",
"hybrid-array",
"once_cell",
"pem-rfc7468",
"pkcs8",
"rand_core",
"rustcrypto-ff",
"rustcrypto-group",
"sec1",
"subtle",
"zeroize",
@@ -472,16 +458,6 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784a4df722dc6267a04af36895398f59d21d07dce47232adf31ec0ff2fa45e67"
[[package]]
name = "ff"
version = "0.14.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d42dd26f5790eda47c1a2158ea4120e32c35ddc9a7743c98a292accc01b54ef3"
dependencies = [
"rand_core",
"subtle",
]
[[package]]
name = "fiat-crypto"
version = "0.3.0"
@@ -490,9 +466,9 @@ checksum = "64cd1e32ddd350061ae6edb1b082d7c54915b5c672c389143b9a63403a109f24"
[[package]]
name = "find-msvc-tools"
version = "0.1.2"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959"
checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
[[package]]
name = "flagset"
@@ -502,9 +478,9 @@ checksum = "b7ac824320a75a52197e8f2d787f6a38b6718bb6897a35142d749af3c0e8f4fe"
[[package]]
name = "flate2"
version = "1.1.2"
version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d"
checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb"
dependencies = [
"crc32fast",
"libz-rs-sys",
@@ -529,57 +505,47 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.3.3"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [
"cfg-if",
"libc",
"r-efi",
"wasi",
]
[[package]]
name = "group"
version = "0.14.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ff6a0b2dd4b981b1ae9e3e6830ab146771f3660d31d57bafd9018805a91b0f1"
dependencies = [
"ff",
"rand_core",
"subtle",
"wasip2",
]
[[package]]
name = "hashbrown"
version = "0.16.0"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
name = "hmac"
version = "0.13.0-rc.2"
version = "0.13.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3fd4dc94c318c1ede8a2a48341c250d6ddecd3ba793da2820301a9f92417ad9"
checksum = "f1c597ac7d6cc8143e30e83ef70915e7f883b18d8bec2e2b2bce47f5bbb06d57"
dependencies = [
"digest 0.11.0-rc.2",
"digest 0.11.0-rc.4",
]
[[package]]
name = "hybrid-array"
version = "0.4.4"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bad028b20a90afcdb5e28a53392562f1db2bdfa238aa1a978b911461bfffb92"
checksum = "f471e0a81b2f90ffc0cb2f951ae04da57de8baa46fa99112b062a5173a5088d0"
dependencies = [
"subtle",
"typenum",
"zeroize",
]
[[package]]
name = "indexmap"
version = "2.11.4"
version = "2.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5"
checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
dependencies = [
"equivalent",
"hashbrown",
@@ -593,9 +559,9 @@ checksum = "2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7"
[[package]]
name = "libc"
version = "0.2.176"
version = "0.2.177"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174"
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
[[package]]
name = "libm"
@@ -636,9 +602,9 @@ dependencies = [
[[package]]
name = "lzma-rust2"
version = "0.14.2"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d3176655c152883186e161a1031192759660878ac04b2f776290b4f4e770a9"
checksum = "96d43a6fec3e2f1176fd435ff6f0e337dab57361918f0f51bbc75995151e2ca0"
dependencies = [
"crc",
"sha2 0.10.9",
@@ -648,14 +614,12 @@ dependencies = [
name = "magisk"
version = "0.0.0"
dependencies = [
"argh",
"base",
"bit-set",
"bitflags",
"bytemuck",
"cxx",
"cxx-gen",
"derive",
"nix",
"num-derive",
"num-traits",
@@ -668,7 +632,6 @@ dependencies = [
name = "magiskboot"
version = "0.0.0"
dependencies = [
"argh",
"base",
"bytemuck",
"byteorder",
@@ -676,7 +639,7 @@ dependencies = [
"cxx",
"cxx-gen",
"der",
"digest 0.11.0-rc.2",
"digest 0.11.0-rc.4",
"fdt",
"flate2",
"lz4",
@@ -689,7 +652,7 @@ dependencies = [
"quick-protobuf",
"rsa",
"sha1",
"sha2 0.11.0-rc.2",
"sha2 0.11.0-rc.3",
"size",
"x509-cert",
"zopfli",
@@ -710,7 +673,6 @@ dependencies = [
name = "magiskpolicy"
version = "0.0.0"
dependencies = [
"argh",
"base",
"cxx",
"cxx-gen",
@@ -735,6 +697,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
dependencies = [
"adler2",
"simd-adler32",
]
[[package]]
@@ -780,51 +743,56 @@ dependencies = [
]
[[package]]
name = "p256"
version = "0.14.0-pre.11"
name = "once_cell"
version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81b374901df34ee468167a58e2a49e468cb059868479cafebeb804f6b855423d"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "p256"
version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdbe8d6ac92e515ca2179ac331c1e4def09db2217d394683e73dace705c2f0c5"
dependencies = [
"ecdsa",
"elliptic-curve",
"primefield",
"primeorder",
"sha2 0.11.0-rc.2",
"sha2 0.11.0-rc.3",
]
[[package]]
name = "p384"
version = "0.14.0-pre.11"
version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "701032b3730df6b882496d6cee8221de0ce4bc11ddc64e6d89784aa5b8a6de30"
checksum = "29c729847b7cf17b9c96f9e6504400f64ae90cb1cdf23610cc1a51f18538ff95"
dependencies = [
"ecdsa",
"elliptic-curve",
"fiat-crypto",
"primefield",
"primeorder",
"sha2 0.11.0-rc.2",
"sha2 0.11.0-rc.3",
]
[[package]]
name = "p521"
version = "0.14.0-pre.11"
version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40ba29c2906eb5c89a8c411c4f11243ee4e5517ee7d71d9a13fedc877a6057b1"
checksum = "75296e7cb5d53c8a5083ff26b5707177962cd5851af961a56316e863f1ea757c"
dependencies = [
"base16ct",
"ecdsa",
"elliptic-curve",
"primefield",
"primeorder",
"rand_core",
"sha2 0.11.0-rc.2",
"sha2 0.11.0-rc.3",
]
[[package]]
name = "pb-rs"
version = "0.10.0"
source = "git+https://github.com/tafia/quick-protobuf.git#54e7d6c5d981c6f7cec2e9a2167c10ed0f9392b4"
source = "git+https://github.com/topjohnwu/quick-protobuf.git#980b0fb0ff81f59c0faa6e6db490fb8ecf59c633"
dependencies = [
"log",
"nom",
@@ -832,9 +800,9 @@ dependencies = [
[[package]]
name = "pem-rfc7468"
version = "1.0.0-rc.3"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8e58fab693c712c0d4e88f8eb3087b6521d060bcaf76aeb20cb192d809115ba"
checksum = "a6305423e0e7738146434843d1694d621cce767262b2a86910beab705e4493d9"
dependencies = [
"base64ct",
]
@@ -851,9 +819,9 @@ dependencies = [
[[package]]
name = "pkcs8"
version = "0.11.0-rc.7"
version = "0.11.0-rc.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93eac55f10aceed84769df670ea4a32d2ffad7399400d41ee1c13b1cd8e1b478"
checksum = "77089aec8290d0b7bb01b671b091095cf1937670725af4fd73d47249f03b12c0"
dependencies = [
"der",
"spki",
@@ -861,31 +829,31 @@ dependencies = [
[[package]]
name = "primefield"
version = "0.14.0-pre.6"
version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7fcd4a163053332fd93f39b81c133e96a98567660981654579c90a99062fbf5"
checksum = "1c3ad342f52c70a953d95acb09a55450fdc07c2214283b81536c3f83f714568e"
dependencies = [
"crypto-bigint",
"ff",
"rand_core",
"rustcrypto-ff",
"subtle",
"zeroize",
]
[[package]]
name = "primeorder"
version = "0.14.0-pre.9"
version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c36e8766fcd270fa9c665b9dc364f570695f5a59240949441b077a397f15b74"
checksum = "f5e84a5f07d7a7c85f299e17753a98d8a09f10799894a637c9ce08d834b6ca02"
dependencies = [
"elliptic-curve",
]
[[package]]
name = "proc-macro2"
version = "1.0.101"
version = "1.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
dependencies = [
"unicode-ident",
]
@@ -893,16 +861,16 @@ dependencies = [
[[package]]
name = "quick-protobuf"
version = "0.8.1"
source = "git+https://github.com/tafia/quick-protobuf.git#54e7d6c5d981c6f7cec2e9a2167c10ed0f9392b4"
source = "git+https://github.com/topjohnwu/quick-protobuf.git#980b0fb0ff81f59c0faa6e6db490fb8ecf59c633"
dependencies = [
"byteorder",
]
[[package]]
name = "quote"
version = "1.0.40"
version = "1.0.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
dependencies = [
"proc-macro2",
]
@@ -915,18 +883,15 @@ checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "rand_core"
version = "0.9.3"
version = "0.10.0-rc-2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
dependencies = [
"getrandom",
]
checksum = "104a23e4e8b77312a823b6b5613edbac78397e2f34320bc7ac4277013ec4478e"
[[package]]
name = "rfc6979"
version = "0.5.0-rc.1"
version = "0.5.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d369f9c4f79388704648e7bcb92749c0d6cf4397039293a9b747694fa4fb4bae"
checksum = "63b8e2323084c987a72875b2fd682b7307d5cf14d47e3875bb5e89948e8809d4"
dependencies = [
"hmac",
"subtle",
@@ -934,18 +899,18 @@ dependencies = [
[[package]]
name = "rsa"
version = "0.10.0-rc.8"
version = "0.10.0-rc.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd8c26d4f6d0d2689c1cc822ac369edb64b4a090bc53141ae563bfa19c797300"
checksum = "e499c52862d75a86c0024cc99dcb6d7127d15af3beae7b03573d62fab7ade08a"
dependencies = [
"const-oid",
"crypto-bigint",
"crypto-primes",
"digest 0.11.0-rc.2",
"digest 0.11.0-rc.4",
"pkcs1",
"pkcs8",
"rand_core",
"sha2 0.11.0-rc.2",
"sha2 0.11.0-rc.3",
"signature",
"spki",
"subtle",
@@ -953,16 +918,31 @@ dependencies = [
]
[[package]]
name = "rust-fuzzy-search"
version = "0.1.1"
name = "rustcrypto-ff"
version = "0.14.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a157657054ffe556d8858504af8a672a054a6e0bd9e8ee531059100c0fa11bb2"
checksum = "aa9cd37111549306f79b09aa2618e15b1e8241b7178c286821e3dd71579db4db"
dependencies = [
"rand_core",
"subtle",
]
[[package]]
name = "rustversion"
version = "1.0.22"
name = "rustcrypto-group"
version = "0.14.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
checksum = "e394cd734b5f97dfc3484fa42aad7acd912961c2bcd96c99aa05b3d6cab7cafd"
dependencies = [
"rand_core",
"rustcrypto-ff",
"subtle",
]
[[package]]
name = "scratch"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d68f2ec51b097e4c1a75b681a8bec621909b5e91f15bb7b840c4f2f7b01148b2"
[[package]]
name = "sec1"
@@ -979,9 +959,9 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.227"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80ece43fc6fbed4eb5392ab50c07334d3e577cbf40997ee896fe7af40bba4245"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
"serde_core",
"serde_derive",
@@ -989,18 +969,18 @@ dependencies = [
[[package]]
name = "serde_core"
version = "1.0.227"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a576275b607a2c86ea29e410193df32bc680303c82f31e275bbfcafe8b33be5"
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.227"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51e694923b8824cf0e9b382adf0f60d4e05f348f357b38833a3fa5ed7c2ede04"
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
@@ -1019,13 +999,13 @@ dependencies = [
[[package]]
name = "sha1"
version = "0.11.0-rc.2"
version = "0.11.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5e046edf639aa2e7afb285589e5405de2ef7e61d4b0ac1e30256e3eab911af9"
checksum = "aa1ae819b9870cadc959a052363de870944a1646932d274a4e270f64bf79e5ef"
dependencies = [
"cfg-if",
"cpufeatures",
"digest 0.11.0-rc.2",
"digest 0.11.0-rc.4",
]
[[package]]
@@ -1041,13 +1021,13 @@ dependencies = [
[[package]]
name = "sha2"
version = "0.11.0-rc.2"
version = "0.11.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1e3878ab0f98e35b2df35fe53201d088299b41a6bb63e3e34dada2ac4abd924"
checksum = "19d43dc0354d88b791216bb5c1bfbb60c0814460cc653ae0ebd71f286d0bd927"
dependencies = [
"cfg-if",
"cpufeatures",
"digest 0.11.0-rc.2",
"digest 0.11.0-rc.4",
]
[[package]]
@@ -1058,11 +1038,11 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signature"
version = "3.0.0-rc.4"
version = "3.0.0-rc.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc280a6ff65c79fbd6622f64d7127f32b85563bca8c53cd2e9141d6744a9056d"
checksum = "2a0251c9d6468f4ba853b6352b190fb7c1e405087779917c238445eb03993826"
dependencies = [
"digest 0.11.0-rc.2",
"digest 0.11.0-rc.4",
"rand_core",
]
@@ -1102,9 +1082,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "2.0.106"
version = "2.0.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
dependencies = [
"proc-macro2",
"quote",
@@ -1122,18 +1102,18 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.16"
version = "2.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0"
checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "2.0.16"
version = "2.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960"
checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
dependencies = [
"proc-macro2",
"quote",
@@ -1163,21 +1143,21 @@ dependencies = [
[[package]]
name = "typenum"
version = "1.18.0"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
[[package]]
name = "unicode-ident"
version = "1.0.19"
version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
[[package]]
name = "unicode-width"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
[[package]]
name = "unicode-xid"
@@ -1191,15 +1171,6 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "wasi"
version = "0.14.7+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
dependencies = [
"wasip2",
]
[[package]]
name = "wasip2"
version = "1.0.1+wasi-0.2.4"
@@ -1220,15 +1191,15 @@ dependencies = [
[[package]]
name = "windows-link"
version = "0.2.0"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-sys"
version = "0.61.1"
version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f109e41dd4a3c848907eb83d5a42ea98b3769495597450cf6d153507b166f0f"
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
dependencies = [
"windows-link",
]
@@ -1253,9 +1224,9 @@ dependencies = [
[[package]]
name = "zeroize"
version = "1.8.1"
version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
dependencies = [
"zeroize_derive",
]
@@ -1279,9 +1250,9 @@ checksum = "2f06ae92f42f5e5c42443fd094f245eb656abf56dd7cce9b8b263236565e00f2"
[[package]]
name = "zopfli"
version = "0.8.2"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7"
checksum = "f05cd8797d63865425ff89b5c4a48804f35ba0ce8d125800027ad6017d2b5249"
dependencies = [
"bumpalo",
"crc32fast",


@@ -1,6 +1,6 @@
[workspace]
exclude = ["external"]
members = ["base", "boot", "core", "core/derive", "init", "sepolicy"]
members = ["base", "base/derive", "boot", "core", "init", "sepolicy"]
resolver = "2"
[workspace.package]
@@ -8,53 +8,56 @@ version = "0.0.0"
edition = "2024"
[workspace.dependencies]
base = { path = "base" }
derive = { path = "base/derive" }
magiskpolicy = { path = "sepolicy" }
cxx = { path = "external/cxx-rs" }
cxx-gen = { path = "external/cxx-rs/gen/lib" }
libc = "0.2.176"
cfg-if = "1.0.3"
libc = "0.2.177"
cfg-if = "1.0.4"
num-traits = "0.2.19"
num-derive = "0.4.2"
thiserror = "2.0.16"
thiserror = "2.0.17"
byteorder = "1.5.0"
size = "0.5.0"
bytemuck = "1.23.2"
bytemuck = "1.24.0"
fdt = "0.1.5"
const_format = "0.2.34"
const_format = "0.2.35"
bit-set = "0.8.0"
syn = "2.0.106"
quote = "1.0.40"
proc-macro2 = "1.0.101"
argh = { version = "0.1.13", default-features = false }
syn = "2.0.111"
quote = "1.0.42"
proc-macro2 = "1.0.103"
pb-rs = { version = "0.10.0", default-features = false }
quick-protobuf = "0.8.1"
flate2 = { version = "1.1.2", default-features = false }
bzip2 = "0.6.0"
zopfli = "0.8.2"
flate2 = { version = "1.1.5", default-features = false }
bzip2 = "0.6.1"
zopfli = "0.8.3"
lz4 = "1.28.1"
lzma-rust2 = { version = "0.14.2", default-features = false }
lzma-rust2 = { version = "0.15.2", default-features = false }
nix = "0.30.1"
bitflags = "2.9.4"
bitflags = "2.10.0"
# Rust crypto crates are tied together
sha1 = "0.11.0-rc.2"
sha2 = "0.11.0-rc.2"
digest = "0.11.0-rc.2"
p256 = "0.14.0-pre.11"
p384 = "0.14.0-pre.11"
p521 = "0.14.0-pre.11"
rsa = "0.10.0-rc.8"
sha1 = "0.11.0-rc.3"
sha2 = "0.11.0-rc.3"
digest = "0.11.0-rc.4"
p256 = "0.14.0-rc.1"
p384 = "0.14.0-rc.1"
p521 = "0.14.0-rc.1"
rsa = "0.10.0-rc.10"
x509-cert = "0.3.0-rc.2"
der = "0.8.0-rc.9"
der = "0.8.0-rc.10"
[patch.crates-io]
pb-rs = { git = "https://github.com/tafia/quick-protobuf.git" }
quick-protobuf = { git = "https://github.com/tafia/quick-protobuf.git" }
pb-rs = { git = "https://github.com/topjohnwu/quick-protobuf.git" }
quick-protobuf = { git = "https://github.com/topjohnwu/quick-protobuf.git" }
lz4-sys = { path = "external/lz4-sys" }
[profile.dev]
opt-level = "z"
lto = "thin"
panic = "abort"
debug = "none"
[profile.release]
opt-level = "z"


@@ -13,11 +13,11 @@ selinux = []
cxx-gen = { workspace = true }
[dependencies]
derive = { workspace = true }
cxx = { workspace = true }
libc = { workspace = true }
cfg-if = { workspace = true }
thiserror = { workspace = true }
argh = { workspace = true }
bytemuck = { workspace = true }
num-traits = { workspace = true }
num-derive = { workspace = true }

native/src/base/argh.rs (normal file, 1226 lines)

File diff suppressed because it is too large.


@@ -10,6 +10,7 @@
#include <string>
#include <base.hpp>
#include <flags.h>
using namespace std;


@@ -0,0 +1,185 @@
// Copyright (c) 2020 Google LLC All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use proc_macro2::{Span, TokenStream};
use quote::ToTokens;
use std::cell::RefCell;
/// A type for collecting procedural macro errors.
#[derive(Default)]
pub struct Errors {
errors: RefCell<Vec<syn::Error>>,
}
/// Produce functions to expect particular literals in `syn::Expr`
macro_rules! expect_lit_fn {
($(($fn_name:ident, $syn_type:ident, $variant:ident, $lit_name:literal),)*) => {
$(
pub fn $fn_name<'a>(&self, e: &'a syn::Expr) -> Option<&'a syn::$syn_type> {
if let syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::$variant(inner), .. }) = e {
Some(inner)
} else {
self.unexpected_lit($lit_name, e);
None
}
}
)*
}
}
/// Produce functions to expect particular variants of `syn::Meta`
macro_rules! expect_meta_fn {
($(($fn_name:ident, $syn_type:ident, $variant:ident, $meta_name:literal),)*) => {
$(
pub fn $fn_name<'a>(&self, meta: &'a syn::Meta) -> Option<&'a syn::$syn_type> {
if let syn::Meta::$variant(inner) = meta {
Some(inner)
} else {
self.unexpected_meta($meta_name, meta);
None
}
}
)*
}
}
impl Errors {
/// Issue an error like:
///
/// Duplicate foo attribute
/// First foo attribute here
pub fn duplicate_attrs(
&self,
attr_kind: &str,
first: &impl syn::spanned::Spanned,
second: &impl syn::spanned::Spanned,
) {
self.duplicate_attrs_inner(attr_kind, first.span(), second.span())
}
fn duplicate_attrs_inner(&self, attr_kind: &str, first: Span, second: Span) {
self.err_span(second, &["Duplicate ", attr_kind, " attribute"].concat());
self.err_span(first, &["First ", attr_kind, " attribute here"].concat());
}
expect_lit_fn![
(expect_lit_str, LitStr, Str, "string"),
(expect_lit_char, LitChar, Char, "character"),
(expect_lit_int, LitInt, Int, "integer"),
];
expect_meta_fn![
(expect_meta_word, Path, Path, "path"),
(expect_meta_list, MetaList, List, "list"),
(
expect_meta_name_value,
MetaNameValue,
NameValue,
"name-value pair"
),
];
fn unexpected_lit(&self, expected: &str, found: &syn::Expr) {
fn lit_kind(lit: &syn::Lit) -> &'static str {
use syn::Lit::{Bool, Byte, ByteStr, Char, Float, Int, Str, Verbatim};
match lit {
Str(_) => "string",
ByteStr(_) => "bytestring",
Byte(_) => "byte",
Char(_) => "character",
Int(_) => "integer",
Float(_) => "float",
Bool(_) => "boolean",
Verbatim(_) => "unknown (possibly extra-large integer)",
_ => "unknown literal kind",
}
}
if let syn::Expr::Lit(syn::ExprLit { lit, .. }) = found {
self.err(
found,
&[
"Expected ",
expected,
" literal, found ",
lit_kind(lit),
" literal",
]
.concat(),
)
} else {
self.err(
found,
&[
"Expected ",
expected,
" literal, found non-literal expression.",
]
.concat(),
)
}
}
fn unexpected_meta(&self, expected: &str, found: &syn::Meta) {
fn meta_kind(meta: &syn::Meta) -> &'static str {
use syn::Meta::{List, NameValue, Path};
match meta {
Path(_) => "path",
List(_) => "list",
NameValue(_) => "name-value pair",
}
}
self.err(
found,
&[
"Expected ",
expected,
" attribute, found ",
meta_kind(found),
" attribute",
]
.concat(),
)
}
/// Issue an error relating to a particular `Spanned` structure.
pub fn err(&self, spanned: &impl syn::spanned::Spanned, msg: &str) {
self.err_span(spanned.span(), msg);
}
/// Issue an error relating to a particular `Span`.
pub fn err_span(&self, span: Span, msg: &str) {
self.push(syn::Error::new(span, msg));
}
/// Issue an error spanning over the given syntax tree node.
pub fn err_span_tokens<T: ToTokens>(&self, tokens: T, msg: &str) {
self.push(syn::Error::new_spanned(tokens, msg));
}
/// Push a `syn::Error` onto the list of errors to issue.
pub fn push(&self, err: syn::Error) {
self.errors.borrow_mut().push(err);
}
/// Convert a `syn::Result` to an `Option`, logging the error if present.
pub fn ok<T>(&self, r: syn::Result<T>) -> Option<T> {
match r {
Ok(v) => Some(v),
Err(e) => {
self.push(e);
None
}
}
}
}
impl ToTokens for Errors {
/// Convert the errors into tokens that, when emit, will cause
/// the user of the macro to receive compiler errors.
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.extend(self.errors.borrow().iter().map(|e| e.to_compile_error()));
}
}


@@ -0,0 +1,912 @@
// Copyright (c) 2020 Google LLC All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use syn::ext::IdentExt as _;
/// Implementation of the `FromArgs` and `argh(...)` derive attributes.
///
/// For more thorough documentation, see the `argh` crate itself.
extern crate proc_macro;
use errors::Errors;
use parse_attrs::{FieldAttrs, FieldKind, TypeAttrs, check_long_name};
use proc_macro2::{Span, TokenStream};
use quote::{ToTokens, quote, quote_spanned};
use std::collections::HashMap;
use std::str::FromStr;
use syn::spanned::Spanned;
use syn::{GenericArgument, LitStr, PathArguments, Type};
mod errors;
mod parse_attrs;
/// Transform the input into a token stream containing any generated implementations,
/// as well as all errors that occurred.
pub(crate) fn impl_from_args(input: &syn::DeriveInput) -> TokenStream {
let errors = &Errors::default();
let type_attrs = &TypeAttrs::parse(errors, input);
let mut output_tokens = match &input.data {
syn::Data::Struct(ds) => {
impl_from_args_struct(errors, &input.ident, type_attrs, &input.generics, ds)
}
syn::Data::Enum(de) => {
impl_from_args_enum(errors, &input.ident, type_attrs, &input.generics, de)
}
syn::Data::Union(_) => {
errors.err(input, "`#[derive(FromArgs)]` cannot be applied to unions");
TokenStream::new()
}
};
errors.to_tokens(&mut output_tokens);
output_tokens
}
/// The kind of optionality a parameter has.
enum Optionality {
None,
Defaulted(TokenStream),
Optional,
Repeating,
DefaultedRepeating(TokenStream),
}
impl PartialEq<Optionality> for Optionality {
fn eq(&self, other: &Optionality) -> bool {
use Optionality::*;
// NB: (Defaulted, Defaulted) can't contain the same token streams
matches!((self, other), (Optional, Optional) | (Repeating, Repeating))
}
}
impl Optionality {
/// Whether or not this is `Optionality::None`
fn is_required(&self) -> bool {
matches!(self, Optionality::None)
}
}
/// A field of a `#![derive(FromArgs)]` struct with attributes and some other
/// notable metadata appended.
struct StructField<'a> {
/// The original parsed field
field: &'a syn::Field,
/// The parsed attributes of the field
attrs: FieldAttrs,
/// The field name. This is contained optionally inside `field`,
/// but is duplicated non-optionally here to indicate that all field that
/// have reached this point must have a field name, and it no longer
/// needs to be unwrapped.
name: &'a syn::Ident,
/// Similar to `name` above, this is contained optionally inside `FieldAttrs`,
/// but here is fully present to indicate that we only have to consider fields
/// with a valid `kind` at this point.
kind: FieldKind,
// If `field.ty` is `Vec<T>` or `Option<T>`, this is `T`, otherwise it's `&field.ty`.
// This is used to enable consistent parsing code between optional and non-optional
// keyed and subcommand fields.
ty_without_wrapper: &'a syn::Type,
// Whether the field represents an optional value, such as an `Option` subcommand field
// or an `Option` or `Vec` keyed argument, or if it has a `default`.
optionality: Optionality,
// The `--`-prefixed name of the option, if one exists.
long_name: Option<String>,
}
impl<'a> StructField<'a> {
/// Attempts to parse a field of a `#[derive(FromArgs)]` struct, pulling out the
/// fields required for code generation.
fn new(errors: &Errors, field: &'a syn::Field, attrs: FieldAttrs) -> Option<Self> {
let name = field.ident.as_ref().expect("missing ident for named field");
// Ensure that one "kind" is present (switch, option, subcommand, positional)
let kind = if let Some(field_type) = &attrs.field_type {
field_type.kind
} else {
errors.err(
field,
concat!(
"Missing `argh` field kind attribute.\n",
"Expected one of: `switch`, `option`, `remaining`, `subcommand`, `positional`",
),
);
return None;
};
// Parse out whether a field is optional (`Option` or `Vec`).
let optionality;
let ty_without_wrapper;
match kind {
FieldKind::Switch => {
if !ty_expect_switch(errors, &field.ty) {
return None;
}
optionality = Optionality::Optional;
ty_without_wrapper = &field.ty;
}
FieldKind::Option | FieldKind::Positional => {
if let Some(default) = &attrs.default {
let tokens = match TokenStream::from_str(&default.value()) {
Ok(tokens) => tokens,
Err(_) => {
errors.err(&default, "Invalid tokens: unable to lex `default` value");
return None;
}
};
// Set the span of the generated tokens to the string literal
let tokens: TokenStream = tokens
.into_iter()
.map(|mut tree| {
tree.set_span(default.span());
tree
})
.collect();
let inner = if let Some(x) = ty_inner(&["Vec"], &field.ty) {
optionality = Optionality::DefaultedRepeating(tokens);
x
} else {
optionality = Optionality::Defaulted(tokens);
&field.ty
};
ty_without_wrapper = inner;
} else {
let mut inner = None;
optionality = if let Some(x) = ty_inner(&["Option"], &field.ty) {
inner = Some(x);
Optionality::Optional
} else if let Some(x) = ty_inner(&["Vec"], &field.ty) {
inner = Some(x);
Optionality::Repeating
} else {
Optionality::None
};
ty_without_wrapper = inner.unwrap_or(&field.ty);
}
}
FieldKind::SubCommand => {
let inner = ty_inner(&["Option"], &field.ty);
optionality = if inner.is_some() {
Optionality::Optional
} else {
Optionality::None
};
ty_without_wrapper = inner.unwrap_or(&field.ty);
}
}
// Determine the "long" name of options and switches.
// Defaults to the kebab-cased field name if `#[argh(long = "...")]` is omitted.
// If `#[argh(long = none)]` is explicitly set, no long name will be set.
let long_name = match kind {
FieldKind::Switch | FieldKind::Option => {
let long_name = match &attrs.long {
None => {
let kebab_name = to_kebab_case(&name.unraw().to_string());
check_long_name(errors, name, &kebab_name);
Some(kebab_name)
}
Some(None) => None,
Some(Some(long)) => Some(long.value()),
}
.map(|long_name| {
if long_name == "help" {
errors.err(field, "Custom `--help` flags are not supported.");
}
format!("--{}", long_name)
});
if let (None, None) = (&attrs.short, &long_name) {
errors.err(field, "At least one of `short` or `long` has to be set.")
};
long_name
}
FieldKind::SubCommand | FieldKind::Positional => None,
};
Some(StructField {
field,
attrs,
kind,
optionality,
ty_without_wrapper,
name,
long_name,
})
}
pub(crate) fn positional_arg_name(&self) -> String {
self.attrs
.arg_name
.as_ref()
.map(LitStr::value)
.unwrap_or_else(|| self.name.to_string().trim_matches('_').to_owned())
}
fn option_arg_name(&self) -> String {
match (&self.attrs.short, &self.long_name) {
(None, None) => unreachable!("short and long cannot both be None"),
(Some(short), None) => format!("-{}", short.value()),
(None, Some(long)) => long.clone(),
(Some(short), Some(long)) => format!("-{},{long}", short.value()),
}
}
}
fn to_kebab_case(s: &str) -> String {
let words = s.split('_').filter(|word| !word.is_empty());
let mut res = String::with_capacity(s.len());
for word in words {
if !res.is_empty() {
res.push('-')
}
res.push_str(word)
}
res
}
/// Implements `FromArgs` and `TopLevelCommand` or `SubCommand` for a `#[derive(FromArgs)]` struct.
fn impl_from_args_struct(
errors: &Errors,
name: &syn::Ident,
type_attrs: &TypeAttrs,
generic_args: &syn::Generics,
ds: &syn::DataStruct,
) -> TokenStream {
let fields = match &ds.fields {
syn::Fields::Named(fields) => fields,
syn::Fields::Unnamed(_) => {
errors.err(
&ds.struct_token,
"`#![derive(FromArgs)]` is not currently supported on tuple structs",
);
return TokenStream::new();
}
syn::Fields::Unit => {
errors.err(
&ds.struct_token,
"#![derive(FromArgs)]` cannot be applied to unit structs",
);
return TokenStream::new();
}
};
let fields: Vec<_> = fields
.named
.iter()
.filter_map(|field| {
let attrs = FieldAttrs::parse(errors, field);
StructField::new(errors, field, attrs)
})
.collect();
ensure_unique_names(errors, &fields);
ensure_only_trailing_positionals_are_optional(errors, &fields);
let impl_span = Span::call_site();
let from_args_method = impl_from_args_struct_from_args(errors, type_attrs, &fields);
let top_or_sub_cmd_impl = top_or_sub_cmd_impl(errors, name, type_attrs, generic_args);
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
let trait_impl = quote_spanned! { impl_span =>
#[automatically_derived]
impl #impl_generics argh::FromArgs for #name #ty_generics #where_clause {
#from_args_method
}
#top_or_sub_cmd_impl
};
trait_impl
}
fn impl_from_args_struct_from_args<'a>(
errors: &Errors,
type_attrs: &TypeAttrs,
fields: &'a [StructField<'a>],
) -> TokenStream {
let init_fields = declare_local_storage_for_from_args_fields(fields);
let unwrap_fields = unwrap_from_args_fields(fields);
let positional_fields: Vec<&StructField<'_>> = fields
.iter()
.filter(|field| field.kind == FieldKind::Positional)
.collect();
let positional_field_idents = positional_fields.iter().map(|field| &field.field.ident);
let positional_field_names = positional_fields.iter().map(|field| field.name.to_string());
let last_positional_is_repeating = positional_fields
.last()
.map(|field| field.optionality == Optionality::Repeating)
.unwrap_or(false);
let last_positional_is_greedy = positional_fields
.last()
.map(|field| field.kind == FieldKind::Positional && field.attrs.greedy.is_some())
.unwrap_or(false);
let flag_output_table = fields.iter().filter_map(|field| {
let field_name = &field.field.ident;
match field.kind {
FieldKind::Option => Some(quote! { argh::ParseStructOption::Value(&mut #field_name) }),
FieldKind::Switch => Some(quote! { argh::ParseStructOption::Flag(&mut #field_name) }),
FieldKind::SubCommand | FieldKind::Positional => None,
}
});
let flag_str_to_output_table_map = flag_str_to_output_table_map_entries(fields);
let mut subcommands_iter = fields
.iter()
.filter(|field| field.kind == FieldKind::SubCommand)
.fuse();
let subcommand: Option<&StructField<'_>> = subcommands_iter.next();
for dup_subcommand in subcommands_iter {
errors.duplicate_attrs(
"subcommand",
subcommand.unwrap().field,
dup_subcommand.field,
);
}
let impl_span = Span::call_site();
let missing_requirements_ident = syn::Ident::new("__missing_requirements", impl_span);
let append_missing_requirements =
append_missing_requirements(&missing_requirements_ident, fields);
let parse_subcommands = if let Some(subcommand) = subcommand {
let name = subcommand.name;
let ty = subcommand.ty_without_wrapper;
quote_spanned! { impl_span =>
Some(argh::ParseStructSubCommand {
subcommands: <#ty as argh::SubCommands>::COMMANDS,
dynamic_subcommands: &<#ty as argh::SubCommands>::dynamic_commands(),
parse_func: &mut |__command, __remaining_args| {
#name = Some(<#ty as argh::FromArgs>::from_args(__command, __remaining_args)?);
Ok(())
},
})
}
} else {
quote_spanned! { impl_span => None }
};
let help_triggers = get_help_triggers(type_attrs);
let method_impl = quote_spanned! { impl_span =>
fn from_args(__cmd_name: &[&str], __args: &[&str])
-> std::result::Result<Self, argh::EarlyExit>
{
#![allow(clippy::unwrap_in_result)]
#( #init_fields )*
argh::parse_struct_args(
__cmd_name,
__args,
argh::ParseStructOptions {
arg_to_slot: &[ #( #flag_str_to_output_table_map ,)* ],
slots: &mut [ #( #flag_output_table, )* ],
help_triggers: &[ #( #help_triggers ),* ],
},
argh::ParseStructPositionals {
positionals: &mut [
#(
argh::ParseStructPositional {
name: #positional_field_names,
slot: &mut #positional_field_idents as &mut dyn argh::ParseValueSlot,
},
)*
],
last_is_repeating: #last_positional_is_repeating,
last_is_greedy: #last_positional_is_greedy,
},
#parse_subcommands,
)?;
let mut #missing_requirements_ident = argh::MissingRequirements::default();
#(
#append_missing_requirements
)*
#missing_requirements_ident.err_on_any()?;
Ok(Self {
#( #unwrap_fields, )*
})
}
};
method_impl
}
/// Get the help triggers from `type_attrs.help_triggers` as a [`Vec<String>`].
///
/// Defaults to vec!["-h", "--help"] if type_attrs.help_triggers is None
fn get_help_triggers(type_attrs: &TypeAttrs) -> Vec<String> {
if type_attrs.is_subcommand.is_some() {
// Subcommands should never have any help triggers
Vec::new()
} else {
type_attrs.help_triggers.as_ref().map_or_else(
|| vec!["-h".to_string(), "--help".to_string()],
|s| {
s.iter()
.filter_map(|s| {
let trigger = s.value();
let trigger_trimmed = trigger.trim().to_owned();
if trigger_trimmed.is_empty() {
None
} else {
Some(trigger_trimmed)
}
})
.collect::<Vec<_>>()
},
)
}
}
/// Ensures that only trailing positional args are non-required.
fn ensure_only_trailing_positionals_are_optional(errors: &Errors, fields: &[StructField<'_>]) {
let mut first_non_required_span = None;
for field in fields {
if field.kind == FieldKind::Positional {
if let Some(first) = first_non_required_span
&& field.optionality.is_required()
{
errors.err_span(
first,
"Only trailing positional arguments may be `Option`, `Vec`, or defaulted.",
);
errors.err(
&field.field,
"Later non-optional positional argument declared here.",
);
return;
}
if !field.optionality.is_required() {
first_non_required_span = Some(field.field.span());
}
}
}
}
/// Ensures that each short or long name is used at most once.
fn ensure_unique_names(errors: &Errors, fields: &[StructField<'_>]) {
let mut seen_short_names = HashMap::new();
let mut seen_long_names = HashMap::new();
for field in fields {
if let Some(short_name) = &field.attrs.short {
let short_name = short_name.value();
if let Some(first_use_field) = seen_short_names.get(&short_name) {
errors.err_span_tokens(
first_use_field,
&format!(
"The short name of \"-{}\" was already used here.",
short_name
),
);
errors.err_span_tokens(field.field, "Later usage here.");
}
seen_short_names.insert(short_name, &field.field);
}
if let Some(long_name) = &field.long_name {
if let Some(first_use_field) = seen_long_names.get(&long_name) {
errors.err_span_tokens(
*first_use_field,
&format!("The long name of \"{}\" was already used here.", long_name),
);
errors.err_span_tokens(field.field, "Later usage here.");
}
seen_long_names.insert(long_name, field.field);
}
}
}
/// Implement `argh::TopLevelCommand` or `argh::SubCommand` as appropriate.
fn top_or_sub_cmd_impl(
errors: &Errors,
name: &syn::Ident,
type_attrs: &TypeAttrs,
generic_args: &syn::Generics,
) -> TokenStream {
let description = String::new();
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
if type_attrs.is_subcommand.is_none() {
// Not a subcommand
quote! {
#[automatically_derived]
impl #impl_generics argh::TopLevelCommand for #name #ty_generics #where_clause {}
}
} else {
let empty_str = syn::LitStr::new("", Span::call_site());
let subcommand_name = type_attrs.name.as_ref().unwrap_or_else(|| {
errors.err(
name,
"`#[argh(name = \"...\")]` attribute is required for subcommands",
);
&empty_str
});
quote! {
#[automatically_derived]
impl #impl_generics argh::SubCommand for #name #ty_generics #where_clause {
const COMMAND: &'static argh::CommandInfo = &argh::CommandInfo {
name: #subcommand_name,
description: #description,
};
}
}
}
}
/// Declare local slots to store each field in during parsing.
///
/// Most fields are stored in `Option<FieldType>` locals.
/// `argh(option)` fields are stored in a `ParseValueSlotTy` along with a
/// function that knows how to decode the appropriate value.
fn declare_local_storage_for_from_args_fields<'a>(
fields: &'a [StructField<'a>],
) -> impl Iterator<Item = TokenStream> + 'a {
fields.iter().map(|field| {
let field_name = &field.field.ident;
let field_type = &field.ty_without_wrapper;
// Wrap field types in `Option` if they aren't already `Option` or `Vec`-wrapped.
let field_slot_type = match field.optionality {
Optionality::Optional | Optionality::Repeating => (&field.field.ty).into_token_stream(),
Optionality::None | Optionality::Defaulted(_) => {
quote! { std::option::Option<#field_type> }
}
Optionality::DefaultedRepeating(_) => {
quote! { std::option::Option<std::vec::Vec<#field_type>> }
}
};
match field.kind {
FieldKind::Option | FieldKind::Positional => {
let from_str_fn = match &field.attrs.from_str_fn {
Some(from_str_fn) => from_str_fn.into_token_stream(),
None => {
quote! {
<#field_type as argh::FromArgValue>::from_arg_value
}
}
};
quote! {
let mut #field_name: argh::ParseValueSlotTy<#field_slot_type, #field_type>
= argh::ParseValueSlotTy {
slot: std::default::Default::default(),
parse_func: |_, value| { #from_str_fn(value) },
};
}
}
FieldKind::SubCommand => {
quote! { let mut #field_name: #field_slot_type = None; }
}
FieldKind::Switch => {
quote! { let mut #field_name: #field_slot_type = argh::Flag::default(); }
}
}
})
}
/// Unwrap non-optional fields and take options out of their tuple slots.
fn unwrap_from_args_fields<'a>(
fields: &'a [StructField<'a>],
) -> impl Iterator<Item = TokenStream> + 'a {
fields.iter().map(|field| {
let field_name = field.name;
match field.kind {
FieldKind::Option | FieldKind::Positional => match &field.optionality {
Optionality::None => quote! {
#field_name: #field_name.slot.unwrap()
},
Optionality::Optional | Optionality::Repeating => {
quote! { #field_name: #field_name.slot }
}
Optionality::Defaulted(tokens) | Optionality::DefaultedRepeating(tokens) => {
quote! {
#field_name: #field_name.slot.unwrap_or_else(|| #tokens)
}
}
},
FieldKind::Switch => field_name.into_token_stream(),
FieldKind::SubCommand => match field.optionality {
Optionality::None => quote! { #field_name: #field_name.unwrap() },
Optionality::Optional | Optionality::Repeating => field_name.into_token_stream(),
Optionality::Defaulted(_) | Optionality::DefaultedRepeating(_) => unreachable!(),
},
}
})
}
/// Entries of tokens like `("--some-flag-key", 5)` that map from a flag key string
/// to an index in the output table.
fn flag_str_to_output_table_map_entries<'a>(fields: &'a [StructField<'a>]) -> Vec<TokenStream> {
let mut flag_str_to_output_table_map = vec![];
for (i, field) in fields.iter().enumerate() {
if let Some(short) = &field.attrs.short {
let short = format!("-{}", short.value());
flag_str_to_output_table_map.push(quote! { (#short, #i) });
}
if let Some(long) = &field.long_name {
flag_str_to_output_table_map.push(quote! { (#long, #i) });
}
}
flag_str_to_output_table_map
}
/// For each non-optional field, add an entry to the `argh::MissingRequirements`.
fn append_missing_requirements<'a>(
// missing_requirements_ident
mri: &syn::Ident,
fields: &'a [StructField<'a>],
) -> impl Iterator<Item = TokenStream> + 'a {
let mri = mri.clone();
fields
.iter()
.filter(|f| f.optionality.is_required())
.map(move |field| {
let field_name = field.name;
match field.kind {
FieldKind::Switch => unreachable!("switches are always optional"),
FieldKind::Positional => {
let name = field.positional_arg_name();
quote! {
if #field_name.slot.is_none() {
#mri.missing_positional_arg(#name)
}
}
}
FieldKind::Option => {
let name = field.option_arg_name();
quote! {
if #field_name.slot.is_none() {
#mri.missing_option(#name)
}
}
}
FieldKind::SubCommand => {
let ty = field.ty_without_wrapper;
quote! {
if #field_name.is_none() {
#mri.missing_subcommands(
<#ty as argh::SubCommands>::COMMANDS
.iter()
.cloned()
.chain(
<#ty as argh::SubCommands>::dynamic_commands()
.iter()
.copied()
),
)
}
}
}
}
})
}
/// Require that a type can be a `switch`.
/// Throws an error for all types except booleans and integers
fn ty_expect_switch(errors: &Errors, ty: &syn::Type) -> bool {
fn ty_can_be_switch(ty: &syn::Type) -> bool {
if let syn::Type::Path(path) = ty {
if path.qself.is_some() {
return false;
}
if path.path.segments.len() != 1 {
return false;
}
let ident = &path.path.segments[0].ident;
// `Option<bool>` can be used as a `switch`.
if ident == "Option"
&& let PathArguments::AngleBracketed(args) = &path.path.segments[0].arguments
&& let GenericArgument::Type(Type::Path(p)) = &args.args[0]
&& p.path.segments[0].ident == "bool"
{
return true;
}
[
"bool", "u8", "u16", "u32", "u64", "u128", "i8", "i16", "i32", "i64", "i128",
]
.iter()
.any(|path| ident == path)
} else {
false
}
}
let res = ty_can_be_switch(ty);
if !res {
errors.err(
ty,
"switches must be of type `bool`, `Option<bool>`, or integer type",
);
}
res
}
/// Returns `Some(T)` if a type is `wrapper_name<T>` for any `wrapper_name` in `wrapper_names`.
fn ty_inner<'a>(wrapper_names: &[&str], ty: &'a syn::Type) -> Option<&'a syn::Type> {
if let syn::Type::Path(path) = ty {
if path.qself.is_some() {
return None;
}
// Since we only check the last path segment, it isn't necessarily the case that
// we're referring to `std::vec::Vec` or `std::option::Option`, but there isn't
// a foolproof way to check these since name resolution happens after macro expansion,
// so this is likely "good enough" (so long as people don't have their own types called
// `Option` or `Vec` that take one generic parameter they're looking to parse).
let last_segment = path.path.segments.last()?;
if !wrapper_names.iter().any(|name| last_segment.ident == *name) {
return None;
}
if let syn::PathArguments::AngleBracketed(gen_args) = &last_segment.arguments {
let generic_arg = gen_args.args.first()?;
if let syn::GenericArgument::Type(ty) = &generic_arg {
return Some(ty);
}
}
}
None
}
/// Implements `FromArgs` and `SubCommands` for a `#![derive(FromArgs)]` enum.
fn impl_from_args_enum(
errors: &Errors,
name: &syn::Ident,
type_attrs: &TypeAttrs,
generic_args: &syn::Generics,
de: &syn::DataEnum,
) -> TokenStream {
parse_attrs::check_enum_type_attrs(errors, type_attrs, &de.enum_token.span);
// An enum variant like `<name>(<ty>)`
struct SubCommandVariant<'a> {
name: &'a syn::Ident,
ty: &'a syn::Type,
}
let mut dynamic_type_and_variant = None;
let variants: Vec<SubCommandVariant<'_>> = de
.variants
.iter()
.filter_map(|variant| {
let name = &variant.ident;
let ty = enum_only_single_field_unnamed_variants(errors, &variant.fields)?;
if parse_attrs::VariantAttrs::parse(errors, variant)
.is_dynamic
.is_some()
{
if dynamic_type_and_variant.is_some() {
errors.err(variant, "Only one variant can have the `dynamic` attribute");
}
dynamic_type_and_variant = Some((ty, name));
None
} else {
Some(SubCommandVariant { name, ty })
}
})
.collect();
let name_repeating = std::iter::repeat(name.clone());
let variant_ty = variants.iter().map(|x| x.ty).collect::<Vec<_>>();
let variant_names = variants.iter().map(|x| x.name).collect::<Vec<_>>();
let dynamic_from_args =
dynamic_type_and_variant
.as_ref()
.map(|(dynamic_type, dynamic_variant)| {
quote! {
if let Some(result) = <#dynamic_type as argh::DynamicSubCommand>::try_from_args(
command_name, args) {
return result.map(#name::#dynamic_variant);
}
}
});
let dynamic_commands = dynamic_type_and_variant.as_ref().map(|(dynamic_type, _)| {
quote! {
fn dynamic_commands() -> &'static [&'static argh::CommandInfo] {
<#dynamic_type as argh::DynamicSubCommand>::commands()
}
}
});
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
quote! {
impl #impl_generics argh::FromArgs for #name #ty_generics #where_clause {
fn from_args(command_name: &[&str], args: &[&str])
-> std::result::Result<Self, argh::EarlyExit>
{
let subcommand_name = if let Some(subcommand_name) = command_name.last() {
*subcommand_name
} else {
return Err(argh::EarlyExit::from("no subcommand name".to_owned()));
};
#(
if subcommand_name == <#variant_ty as argh::SubCommand>::COMMAND.name {
return Ok(#name_repeating::#variant_names(
<#variant_ty as argh::FromArgs>::from_args(command_name, args)?
));
}
)*
#dynamic_from_args
Err(argh::EarlyExit::from("no subcommand matched".to_owned()))
}
}
impl #impl_generics argh::SubCommands for #name #ty_generics #where_clause {
const COMMANDS: &'static [&'static argh::CommandInfo] = &[#(
<#variant_ty as argh::SubCommand>::COMMAND,
)*];
#dynamic_commands
}
}
}
/// Returns `Some(Bar)` if the field is a single-field unnamed variant like `Foo(Bar)`.
/// Otherwise, generates an error.
fn enum_only_single_field_unnamed_variants<'a>(
errors: &Errors,
variant_fields: &'a syn::Fields,
) -> Option<&'a syn::Type> {
macro_rules! with_enum_suggestion {
($help_text:literal) => {
concat!(
$help_text,
"\nInstead, use a variant with a single unnamed field for each subcommand:\n",
" enum MyCommandEnum {\n",
" SubCommandOne(SubCommandOne),\n",
" SubCommandTwo(SubCommandTwo),\n",
" }",
)
};
}
match variant_fields {
syn::Fields::Named(fields) => {
errors.err(
fields,
with_enum_suggestion!(
"`#![derive(FromArgs)]` `enum`s do not support variants with named fields."
),
);
None
}
syn::Fields::Unit => {
errors.err(
variant_fields,
with_enum_suggestion!(
"`#![derive(FromArgs)]` does not support `enum`s with no variants."
),
);
None
}
syn::Fields::Unnamed(fields) => {
if fields.unnamed.len() != 1 {
errors.err(
fields,
with_enum_suggestion!(
"`#![derive(FromArgs)]` `enum` variants must only contain one field."
),
);
None
} else {
// `unwrap` is okay because of the length check above.
let first_field = fields.unnamed.first().unwrap();
Some(&first_field.ty)
}
}
}
}
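
Taken together, the vendored derive code above supports the surface exercised later in this compare: short-only flags via `long = none`, custom `help_triggers`, and optional trailing positionals. A minimal sketch of a struct using that surface, assuming the upstream argh runtime's `FromArgs`/`FromArgValue` impls are available (the struct and field names are illustrative, not taken from any diff here):

use argh::FromArgs;

/// example tool (illustrative only)
#[derive(FromArgs)]
#[argh(help_triggers("-h", "--help"))]
struct Example {
    /// skip decompression
    #[argh(switch, short = 'n', long = none)]
    no_decompress: bool,
    /// input image
    #[argh(positional)]
    img: String,
    /// optional output path (only trailing positionals may be optional)
    #[argh(positional)]
    out: Option<String>,
}

With this shape, `-n` has no `--no-decompress` alias, and `out` may be omitted on the command line.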


@@ -0,0 +1,688 @@
// Copyright (c) 2020 Google LLC All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use super::errors::Errors;
use proc_macro2::Span;
use std::collections::hash_map::{Entry, HashMap};
/// Attributes applied to a field of a `#![derive(FromArgs)]` struct.
#[derive(Default)]
pub struct FieldAttrs {
pub default: Option<syn::LitStr>,
pub description: Option<Description>,
pub from_str_fn: Option<syn::ExprPath>,
pub field_type: Option<FieldType>,
pub long: Option<Option<syn::LitStr>>,
pub short: Option<syn::LitChar>,
pub arg_name: Option<syn::LitStr>,
pub greedy: Option<syn::Path>,
pub hidden_help: bool,
}
/// The purpose of a particular field on a `#![derive(FromArgs)]` struct.
#[derive(Copy, Clone, Eq, PartialEq)]
pub enum FieldKind {
/// Switches are booleans that are set to "true" by passing the flag.
Switch,
/// Options are `--key value`. They may be optional (using `Option`),
/// or repeating (using `Vec`), or required (neither `Option` nor `Vec`)
Option,
/// Subcommand fields (of which there can be at most one) refer to enums
/// containing one of several potential subcommands. They may be optional
/// (using `Option`) or required (no `Option`).
SubCommand,
/// Positional arguments are parsed literally if the input
/// does not begin with `-` or `--` and is not a subcommand.
/// They are parsed in declaration order, and only the last positional
/// argument in a type may be an `Option`, `Vec`, or have a default value.
Positional,
}
/// The type of a field on a `#![derive(FromArgs)]` struct.
///
/// This is a simple wrapper around `FieldKind` which includes the `syn::Ident`
/// of the attribute containing the field kind.
pub struct FieldType {
pub kind: FieldKind,
pub ident: syn::Ident,
}
/// A description of a `#![derive(FromArgs)]` struct.
///
/// Defaults to the docstring if one is present, or `#[argh(description = "...")]`
/// if one is provided.
pub struct Description {
/// Whether the description was an explicit annotation or whether it was a doc string.
pub explicit: bool,
pub content: syn::LitStr,
}
impl FieldAttrs {
pub fn parse(errors: &Errors, field: &syn::Field) -> Self {
let mut this = Self::default();
for attr in &field.attrs {
if is_doc_attr(attr) {
parse_attr_doc(errors, attr, &mut this.description);
continue;
}
let ml = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
ml
} else {
continue;
};
for meta in ml {
let name = meta.path();
if name.is_ident("arg_name") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
this.parse_attr_arg_name(errors, m);
}
} else if name.is_ident("default") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
this.parse_attr_default(errors, m);
}
} else if name.is_ident("description") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
parse_attr_description(errors, m, &mut this.description);
}
} else if name.is_ident("from_str_fn") {
if let Some(m) = errors.expect_meta_list(&meta) {
this.parse_attr_from_str_fn(errors, m);
}
} else if name.is_ident("long") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
this.parse_attr_long(errors, m);
}
} else if name.is_ident("option") {
parse_attr_field_type(errors, &meta, FieldKind::Option, &mut this.field_type);
} else if name.is_ident("short") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
this.parse_attr_short(errors, m);
}
} else if name.is_ident("subcommand") {
parse_attr_field_type(
errors,
&meta,
FieldKind::SubCommand,
&mut this.field_type,
);
} else if name.is_ident("switch") {
parse_attr_field_type(errors, &meta, FieldKind::Switch, &mut this.field_type);
} else if name.is_ident("positional") {
parse_attr_field_type(
errors,
&meta,
FieldKind::Positional,
&mut this.field_type,
);
} else if name.is_ident("greedy") {
this.greedy = Some(name.clone());
} else if name.is_ident("hidden_help") {
this.hidden_help = true;
} else {
errors.err(
&meta,
concat!(
"Invalid field-level `argh` attribute\n",
"Expected one of: `arg_name`, `default`, `description`, `from_str_fn`, `greedy`, ",
"`long`, `option`, `short`, `subcommand`, `switch`, `hidden_help`",
),
);
}
}
}
if let (Some(default), Some(field_type)) = (&this.default, &this.field_type) {
match field_type.kind {
FieldKind::Option | FieldKind::Positional => {}
FieldKind::SubCommand | FieldKind::Switch => errors.err(
default,
"`default` may only be specified on `#[argh(option)]` \
or `#[argh(positional)]` fields",
),
}
}
match (&this.greedy, this.field_type.as_ref().map(|f| f.kind)) {
(Some(_), Some(FieldKind::Positional)) => {}
(Some(greedy), Some(_)) => errors.err(
&greedy,
"`greedy` may only be specified on `#[argh(positional)]` \
fields",
),
_ => {}
}
if let Some(d) = &this.description {
check_option_description(errors, d.content.value().trim(), d.content.span());
}
this
}
fn parse_attr_from_str_fn(&mut self, errors: &Errors, m: &syn::MetaList) {
parse_attr_fn_name(errors, m, "from_str_fn", &mut self.from_str_fn)
}
fn parse_attr_default(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_single_string(errors, m, "default", &mut self.default);
}
fn parse_attr_arg_name(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_single_string(errors, m, "arg_name", &mut self.arg_name);
}
fn parse_attr_long(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
if let Some(first) = &self.long {
errors.duplicate_attrs("long", first, m);
} else if let syn::Expr::Path(syn::ExprPath { path, .. }) = &m.value
&& let Some(ident) = path.get_ident()
&& ident.to_string().eq_ignore_ascii_case("none")
{
self.long = Some(None);
} else if let Some(lit_str) = errors.expect_lit_str(&m.value) {
self.long = Some(Some(lit_str.clone()));
}
if let Some(Some(long)) = &self.long {
let value = long.value();
check_long_name(errors, long, &value);
}
}
fn parse_attr_short(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
if let Some(first) = &self.short {
errors.duplicate_attrs("short", first, m);
} else if let Some(lit_char) = errors.expect_lit_char(&m.value) {
self.short = Some(lit_char.clone());
if !lit_char.value().is_ascii() {
errors.err(lit_char, "Short names must be ASCII");
}
}
}
}
pub(crate) fn check_long_name(errors: &Errors, spanned: &impl syn::spanned::Spanned, value: &str) {
if !value.is_ascii() {
errors.err(spanned, "Long names must be ASCII");
}
if !value
.chars()
.all(|c| c.is_lowercase() || c == '-' || c.is_ascii_digit())
{
errors.err(
spanned,
"Long names may only contain lowercase letters, digits, and dashes",
);
}
}
fn parse_attr_fn_name(
errors: &Errors,
m: &syn::MetaList,
attr_name: &str,
slot: &mut Option<syn::ExprPath>,
) {
if let Some(first) = slot {
errors.duplicate_attrs(attr_name, first, m);
}
*slot = errors.ok(m.parse_args());
}
fn parse_attr_field_type(
errors: &Errors,
meta: &syn::Meta,
kind: FieldKind,
slot: &mut Option<FieldType>,
) {
if let Some(path) = errors.expect_meta_word(meta) {
if let Some(first) = slot {
errors.duplicate_attrs("field kind", &first.ident, path);
} else if let Some(word) = path.get_ident() {
*slot = Some(FieldType {
kind,
ident: word.clone(),
});
}
}
}
// Whether the attribute is one like `#[<name> ...]`
fn is_matching_attr(name: &str, attr: &syn::Attribute) -> bool {
attr.path().segments.len() == 1 && attr.path().segments[0].ident == name
}
/// Checks for `#[doc ...]`, which is generated by doc comments.
fn is_doc_attr(attr: &syn::Attribute) -> bool {
is_matching_attr("doc", attr)
}
/// Checks for `#[argh ...]`
fn is_argh_attr(attr: &syn::Attribute) -> bool {
is_matching_attr("argh", attr)
}
/// Filters out non-`#[argh(...)]` attributes and converts to a sequence of `syn::Meta`.
fn argh_attr_to_meta_list(
errors: &Errors,
attr: &syn::Attribute,
) -> Option<impl IntoIterator<Item = syn::Meta>> {
if !is_argh_attr(attr) {
return None;
}
let ml = errors.expect_meta_list(&attr.meta)?;
errors.ok(ml.parse_args_with(
syn::punctuated::Punctuated::<syn::Meta, syn::Token![,]>::parse_terminated,
))
}
/// Represents a `#[derive(FromArgs)]` type's top-level attributes.
#[derive(Default)]
pub struct TypeAttrs {
pub is_subcommand: Option<syn::Ident>,
pub name: Option<syn::LitStr>,
pub description: Option<Description>,
pub examples: Vec<syn::LitStr>,
pub notes: Vec<syn::LitStr>,
pub error_codes: Vec<(syn::LitInt, syn::LitStr)>,
/// Arguments that trigger printing of the help message
pub help_triggers: Option<Vec<syn::LitStr>>,
}
impl TypeAttrs {
/// Parse top-level `#[argh(...)]` attributes
pub fn parse(errors: &Errors, derive_input: &syn::DeriveInput) -> Self {
let mut this = TypeAttrs::default();
for attr in &derive_input.attrs {
if is_doc_attr(attr) {
parse_attr_doc(errors, attr, &mut this.description);
continue;
}
let ml: Vec<syn::Meta> = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
ml.into_iter().collect()
} else {
continue;
};
for meta in ml.iter() {
let name = meta.path();
if name.is_ident("description") {
if let Some(m) = errors.expect_meta_name_value(meta) {
parse_attr_description(errors, m, &mut this.description);
}
} else if name.is_ident("error_code") {
if let Some(m) = errors.expect_meta_list(meta) {
this.parse_attr_error_code(errors, m);
}
} else if name.is_ident("example") {
if let Some(m) = errors.expect_meta_name_value(meta) {
this.parse_attr_example(errors, m);
}
} else if name.is_ident("name") {
if let Some(m) = errors.expect_meta_name_value(meta) {
this.parse_attr_name(errors, m);
}
} else if name.is_ident("note") {
if let Some(m) = errors.expect_meta_name_value(meta) {
this.parse_attr_note(errors, m);
}
} else if name.is_ident("subcommand") {
if let Some(ident) = errors.expect_meta_word(meta).and_then(|p| p.get_ident()) {
this.parse_attr_subcommand(errors, ident);
}
} else if name.is_ident("help_triggers") {
if let Some(m) = errors.expect_meta_list(meta) {
Self::parse_help_triggers(m, errors, &mut this);
}
} else {
errors.err(
meta,
concat!(
"Invalid type-level `argh` attribute\n",
"Expected one of: `description`, `error_code`, `example`, `name`, ",
"`note`, `subcommand`, `help_triggers`",
),
);
}
}
if this.is_subcommand.is_some() && this.help_triggers.is_some() {
let help_meta = ml
.iter()
.find(|meta| meta.path().is_ident("help_triggers"))
.unwrap();
errors.err(help_meta, "Cannot use `help_triggers` on a subcommand");
}
}
this.check_error_codes(errors);
this
}
/// Checks that error codes are within range for `i32` and that they are
/// never duplicated.
fn check_error_codes(&self, errors: &Errors) {
// map from error code to index
let mut map: HashMap<u64, usize> = HashMap::new();
for (index, (lit_int, _lit_str)) in self.error_codes.iter().enumerate() {
let value = match lit_int.base10_parse::<u64>() {
Ok(v) => v,
Err(e) => {
errors.push(e);
continue;
}
};
if value > (i32::MAX as u64) {
errors.err(lit_int, "Error code out of range for `i32`");
}
match map.entry(value) {
Entry::Occupied(previous) => {
let previous_index = *previous.get();
let (previous_lit_int, _previous_lit_str) = &self.error_codes[previous_index];
errors.err(lit_int, &format!("Duplicate error code {}", value));
errors.err(
previous_lit_int,
&format!("Error code {} previously defined here", value),
);
}
Entry::Vacant(slot) => {
slot.insert(index);
}
}
}
}
fn parse_attr_error_code(&mut self, errors: &Errors, ml: &syn::MetaList) {
errors.ok(ml.parse_args_with(|input: syn::parse::ParseStream| {
let err_code = input.parse()?;
input.parse::<syn::Token![,]>()?;
let err_msg = input.parse()?;
if let (Some(err_code), Some(err_msg)) = (
errors.expect_lit_int(&err_code),
errors.expect_lit_str(&err_msg),
) {
self.error_codes.push((err_code.clone(), err_msg.clone()));
}
Ok(())
}));
}
fn parse_attr_example(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_multi_string(errors, m, &mut self.examples)
}
fn parse_attr_name(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_single_string(errors, m, "name", &mut self.name);
if let Some(name) = &self.name
&& name.value() == "help"
{
errors.err(name, "Custom `help` commands are not supported.");
}
}
fn parse_attr_note(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_multi_string(errors, m, &mut self.notes)
}
fn parse_attr_subcommand(&mut self, errors: &Errors, ident: &syn::Ident) {
if let Some(first) = &self.is_subcommand {
errors.duplicate_attrs("subcommand", first, ident);
} else {
self.is_subcommand = Some(ident.clone());
}
}
// Get the list of arguments that trigger printing of the help message as a vector of strings, e.g. `help_triggers("-h", "--help", "help")`
fn parse_help_triggers(m: &syn::MetaList, errors: &Errors, this: &mut TypeAttrs) {
let parser = Punctuated::<syn::Expr, syn::Token![,]>::parse_terminated;
match parser.parse(m.tokens.clone().into()) {
Ok(args) => {
let mut triggers = Vec::new();
for arg in args {
if let syn::Expr::Lit(syn::ExprLit {
lit: syn::Lit::Str(lit_str),
..
}) = arg
{
triggers.push(lit_str);
}
}
this.help_triggers = Some(triggers);
}
Err(err) => errors.push(err),
}
}
}
/// Represents an enum variant's attributes.
#[derive(Default)]
pub struct VariantAttrs {
pub is_dynamic: Option<syn::Path>,
}
impl VariantAttrs {
/// Parse enum variant `#[argh(...)]` attributes
pub fn parse(errors: &Errors, variant: &syn::Variant) -> Self {
let mut this = VariantAttrs::default();
let fields = match &variant.fields {
syn::Fields::Named(fields) => Some(&fields.named),
syn::Fields::Unnamed(fields) => Some(&fields.unnamed),
syn::Fields::Unit => None,
};
for field in fields.into_iter().flatten() {
for attr in &field.attrs {
if is_argh_attr(attr) {
err_unused_enum_attr(errors, attr);
}
}
}
for attr in &variant.attrs {
let ml = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
ml
} else {
continue;
};
for meta in ml {
let name = meta.path();
if name.is_ident("dynamic") {
if let Some(prev) = this.is_dynamic.as_ref() {
errors.duplicate_attrs("dynamic", prev, &meta);
} else {
this.is_dynamic = errors.expect_meta_word(&meta).cloned();
}
} else {
errors.err(
&meta,
"Invalid variant-level `argh` attribute\n\
Variants can only have the #[argh(dynamic)] attribute.",
);
}
}
}
this
}
}
fn check_option_description(errors: &Errors, desc: &str, span: Span) {
let chars = &mut desc.trim().chars();
match (chars.next(), chars.next()) {
(Some(x), _) if x.is_lowercase() => {}
// If both the first and second letter are not lowercase,
// this is likely an initialism which should be allowed.
(Some(x), Some(y)) if !x.is_lowercase() && (y.is_alphanumeric() && !y.is_lowercase()) => {}
_ => {
errors.err_span(span, "Descriptions must begin with a lowercase letter");
}
}
}
fn parse_attr_single_string(
errors: &Errors,
m: &syn::MetaNameValue,
name: &str,
slot: &mut Option<syn::LitStr>,
) {
if let Some(first) = slot {
errors.duplicate_attrs(name, first, m);
} else if let Some(lit_str) = errors.expect_lit_str(&m.value) {
*slot = Some(lit_str.clone());
}
}
fn parse_attr_multi_string(errors: &Errors, m: &syn::MetaNameValue, list: &mut Vec<syn::LitStr>) {
if let Some(lit_str) = errors.expect_lit_str(&m.value) {
list.push(lit_str.clone());
}
}
fn parse_attr_doc(errors: &Errors, attr: &syn::Attribute, slot: &mut Option<Description>) {
let nv = if let Some(nv) = errors.expect_meta_name_value(&attr.meta) {
nv
} else {
return;
};
// Don't replace an existing explicit description.
if slot.as_ref().map(|d| d.explicit).unwrap_or(false) {
return;
}
if let Some(lit_str) = errors.expect_lit_str(&nv.value) {
let lit_str = if let Some(previous) = slot {
let previous = &previous.content;
let previous_span = previous.span();
syn::LitStr::new(
&(previous.value() + &unescape_doc(lit_str.value())),
previous_span,
)
} else {
syn::LitStr::new(&unescape_doc(lit_str.value()), lit_str.span())
};
*slot = Some(Description {
explicit: false,
content: lit_str,
});
}
}
/// Replaces escape sequences in doc-comments with the characters they represent.
///
/// Rustdoc understands CommonMark escape sequences consisting of a backslash followed by an ASCII
/// punctuation character. Any other backslash is treated as a literal backslash.
fn unescape_doc(s: String) -> String {
let mut result = String::with_capacity(s.len());
let mut characters = s.chars().peekable();
while let Some(mut character) = characters.next() {
if character == '\\'
&& let Some(next_character) = characters.peek()
&& next_character.is_ascii_punctuation()
{
character = *next_character;
characters.next();
}
// Braces must be escaped as this string will be used as a format string
if character == '{' || character == '}' {
result.push(character);
}
result.push(character);
}
result
}
fn parse_attr_description(errors: &Errors, m: &syn::MetaNameValue, slot: &mut Option<Description>) {
let lit_str = if let Some(lit_str) = errors.expect_lit_str(&m.value) {
lit_str
} else {
return;
};
// Don't allow multiple explicit (non doc-comment) descriptions
if let Some(description) = slot
&& description.explicit
{
errors.duplicate_attrs("description", &description.content, lit_str);
}
*slot = Some(Description {
explicit: true,
content: lit_str.clone(),
});
}
/// Checks that a `#![derive(FromArgs)]` enum has an `#[argh(subcommand)]`
/// attribute and that it does not have any other type-level `#[argh(...)]` attributes.
pub fn check_enum_type_attrs(errors: &Errors, type_attrs: &TypeAttrs, type_span: &Span) {
let TypeAttrs {
is_subcommand,
name,
description,
examples,
notes,
error_codes,
help_triggers,
} = type_attrs;
// Ensure that `#[argh(subcommand)]` is present.
if is_subcommand.is_none() {
errors.err_span(
*type_span,
concat!(
"`#![derive(FromArgs)]` on `enum`s can only be used to enumerate subcommands.\n",
"Consider adding `#[argh(subcommand)]` to the `enum` declaration.",
),
);
}
// Error on all other type-level attributes.
if let Some(name) = name {
err_unused_enum_attr(errors, name);
}
if let Some(description) = description
&& description.explicit
{
err_unused_enum_attr(errors, &description.content);
}
if let Some(example) = examples.first() {
err_unused_enum_attr(errors, example);
}
if let Some(note) = notes.first() {
err_unused_enum_attr(errors, note);
}
if let Some(err_code) = error_codes.first() {
err_unused_enum_attr(errors, &err_code.0);
}
if let Some(triggers) = help_triggers
&& let Some(trigger) = triggers.first()
{
err_unused_enum_attr(errors, trigger);
}
}
fn err_unused_enum_attr(errors: &Errors, location: &impl syn::spanned::Spanned) {
errors.err(
location,
concat!(
"Unused `argh` attribute on `#![derive(FromArgs)]` enum. ",
"Such `enum`s can only be used to dispatch to subcommands, ",
"and should only contain the #[argh(subcommand)] attribute.",
),
);
}


@@ -0,0 +1,19 @@
#![recursion_limit = "256"]
use proc_macro::TokenStream;
mod argh;
mod decodable;
#[proc_macro_derive(Decodable)]
pub fn derive_decodable(input: TokenStream) -> TokenStream {
decodable::derive_decodable(input)
}
/// Entrypoint for `#[derive(FromArgs)]`.
#[proc_macro_derive(FromArgs, attributes(argh))]
pub fn argh_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse_macro_input!(input as syn::DeriveInput);
let token = argh::impl_from_args(&ast);
token.into()
}


@@ -4,7 +4,10 @@ use crate::{
fd_path, fd_set_attr,
};
use libc::{dirent, mode_t};
use nix::{errno::Errno, fcntl::AtFlags, fcntl::OFlag, sys::stat::Mode, unistd::UnlinkatFlags};
use nix::errno::Errno;
use nix::fcntl::{AtFlags, OFlag};
use nix::sys::stat::Mode;
use nix::unistd::UnlinkatFlags;
use std::fs::File;
use std::ops::Deref;
use std::os::fd::{AsFd, AsRawFd, BorrowedFd, IntoRawFd, OwnedFd, RawFd};


@@ -13,6 +13,7 @@ struct mmap_data : public byte_data {
(sizeof(void *) == 4 && BLKGETSIZE64 == 0x80041272));
ALLOW_MOVE_ONLY(mmap_data)
mmap_data() = default;
explicit mmap_data(const char *name, bool rw = false);
mmap_data(int dirfd, const char *name, bool rw = false);
mmap_data(int fd, size_t sz, bool rw = false);


@@ -4,13 +4,10 @@ use crate::{
};
use bytemuck::{Pod, bytes_of, bytes_of_mut};
use libc::{c_uint, makedev, mode_t};
use nix::{
errno::Errno,
fcntl::{AT_FDCWD, OFlag},
sys::stat::{FchmodatFlags, Mode},
unistd::AccessFlags,
unistd::{Gid, Uid},
};
use nix::errno::Errno;
use nix::fcntl::{AT_FDCWD, OFlag};
use nix::sys::stat::{FchmodatFlags, Mode};
use nix::unistd::{AccessFlags, Gid, Uid};
use num_traits::AsPrimitive;
use std::cmp::min;
use std::ffi::CStr;


@@ -1,14 +1,13 @@
#![feature(vec_into_raw_parts)]
#![allow(clippy::missing_safety_doc)]
pub use const_format;
pub use libc;
pub use nix;
pub use {const_format, libc, nix};
pub use cstr::{
FsPathFollow, StrErr, Utf8CStr, Utf8CStrBuf, Utf8CStrBufArr, Utf8CStrBufRef, Utf8CString,
};
use cxx_extern::*;
pub use derive;
pub use dir::*;
pub use ffi::{Utf8CStrRef, fork_dont_care, set_nice_name};
pub use files::*;
@@ -16,6 +15,7 @@ pub use logging::*;
pub use misc::*;
pub use result::*;
pub mod argh;
pub mod cstr;
mod cxx_extern;
mod dir;


@@ -76,7 +76,7 @@ fn log_with_writer<F: FnOnce(LogWriter)>(level: LogLevel, f: F) {
return;
}
f(logger.write);
if logger.flags.contains(LogFlag::EXIT_ON_ERROR) {
if matches!(level, LogLevel::Error) && logger.flags.contains(LogFlag::EXIT_ON_ERROR) {
exit(-1);
}
}


@@ -15,7 +15,7 @@ clazz(clazz &&) = delete;
#define ALLOW_MOVE_ONLY(clazz) \
clazz(const clazz&) = delete; \
clazz(clazz &&o) { swap(o); } \
clazz(clazz &&o) : clazz() { swap(o); } \
clazz& operator=(clazz &&o) { swap(o); return *this; }
struct Utf8CStr;


@@ -1,16 +1,13 @@
use super::argh::{EarlyExit, MissingRequirements};
use crate::{Utf8CStr, Utf8CString, cstr, ffi};
use argh::{EarlyExit, MissingRequirements};
use libc::c_char;
use std::{
fmt,
fmt::Arguments,
io::Write,
mem::ManuallyDrop,
process::exit,
slice, str,
sync::Arc,
sync::atomic::{AtomicPtr, Ordering},
};
use std::fmt::Arguments;
use std::io::Write;
use std::mem::ManuallyDrop;
use std::process::exit;
use std::sync::Arc;
use std::sync::atomic::{AtomicPtr, Ordering};
use std::{fmt, slice, str};
pub fn errno() -> &'static mut i32 {
unsafe { &mut *libc::__errno() }
@@ -87,17 +84,16 @@ impl<T> EarlyExitExt<T> for Result<T, EarlyExit> {
fn on_early_exit<F: FnOnce()>(self, print_help_msg: F) -> T {
match self {
Ok(t) => t,
Err(EarlyExit { output, status }) => match status {
Ok(_) => {
Err(EarlyExit { output, is_help }) => {
if is_help {
print_help_msg();
exit(0)
}
Err(_) => {
} else {
eprintln!("{output}");
print_help_msg();
exit(1)
}
},
}
}
}
}


@@ -12,12 +12,11 @@ cxx-gen = { workspace = true }
pb-rs = { workspace = true }
[dependencies]
base = { path = "../base" }
base = { workspace = true }
cxx = { workspace = true }
byteorder = { workspace = true }
size = { workspace = true }
quick-protobuf = { workspace = true }
argh = { workspace = true }
sha1 = { workspace = true }
sha2 = { workspace = true }
digest = { workspace = true }


@@ -53,6 +53,19 @@ static bool check_env(const char *name) {
return val != nullptr && val == "true"sv;
}
static bool guess_lzma(const uint8_t *buf, size_t len) {
// 0 : (pb * 5 + lp) * 9 + lc
// 1 - 4 : dict size, must be 2^n
// 5 - 12: all 0xFF
if (len <= 13) return false;
if (memcmp(buf, "\x5d", 1) != 0) return false;
uint32_t dict_sz = 0;
memcpy(&dict_sz, buf + 1, sizeof(dict_sz));
if (dict_sz == 0 || (dict_sz & (dict_sz - 1)) != 0) return false;
if (memcmp(buf + 5, "\xff\xff\xff\xff\xff\xff\xff\xff", 8) != 0) return false;
return true;
}
FileFormat check_fmt(const void *buf, size_t len) {
if (CHECKED_MATCH(CHROMEOS_MAGIC)) {
return FileFormat::CHROMEOS;
@@ -66,8 +79,7 @@ FileFormat check_fmt(const void *buf, size_t len) {
return FileFormat::LZOP;
} else if (CHECKED_MATCH(XZ_MAGIC)) {
return FileFormat::XZ;
} else if (len >= 13 && memcmp(buf, "\x5d\x00\x00", 3) == 0
&& (((char *)buf)[12] == '\xff' || ((char *)buf)[12] == '\x00')) {
} else if (guess_lzma(static_cast<const uint8_t *>(buf), len)) {
return FileFormat::LZMA;
} else if (CHECKED_MATCH(BZIP_MAGIC)) {
return FileFormat::BZIP2;
@@ -279,9 +291,10 @@ static int find_dtb_offset(const uint8_t *buf, unsigned sz) {
auto fdt_hdr = reinterpret_cast<const fdt_header *>(curr);
// Check that fdt_header.totalsize does not overflow kernel image size
// Check that fdt_header.totalsize does not overflow kernel image size or is empty dtb
// https://github.com/torvalds/linux/commit/7b937cc243e5b1df8780a0aa743ce800df6c68d1
uint32_t totalsize = fdt_hdr->totalsize;
if (totalsize > end - curr)
if (totalsize > end - curr || totalsize <= 0x48)
continue;
// Check that fdt_header.off_dt_struct does not overflow kernel image size
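
The new guess_lzma check above validates a raw LZMA header: byte 0 is the properties byte (0x5d for the common lc=3, lp=0, pb=2 defaults), bytes 1-4 hold a power-of-two dictionary size, and bytes 5-12 are the all-0xFF unknown-size marker. A minimal Rust sketch of the same heuristic (the helper name is made up; the C++ reads the dictionary size with a native-endian memcpy, assumed little-endian here):

fn looks_like_lzma(buf: &[u8]) -> bool {
    // Need at least the 13-byte header plus some payload.
    if buf.len() <= 13 {
        return false;
    }
    // Properties byte: (pb * 5 + lp) * 9 + lc == 0x5d for the common defaults.
    if buf[0] != 0x5d {
        return false;
    }
    // Dictionary size must be a non-zero power of two.
    let dict_sz = u32::from_le_bytes([buf[1], buf[2], buf[3], buf[4]]);
    if dict_sz == 0 || !dict_sz.is_power_of_two() {
        return false;
    }
    // Uncompressed-size field set to all 0xFF, i.e. unknown size.
    buf[5..13].iter().all(|&b| b == 0xff)
}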


@@ -1,4 +1,5 @@
use pb_rs::{ConfigBuilder, types::FileDescriptor};
use pb_rs::ConfigBuilder;
use pb_rs::types::FileDescriptor;
use crate::codegen::gen_cxx_binding;


@@ -6,9 +6,11 @@ use crate::patch::hexpatch;
use crate::payload::extract_boot_from_payload;
use crate::sign::{sha1_hash, sign_boot_image};
use argh::{CommandInfo, EarlyExit, FromArgs, SubCommand};
use base::libc::umask;
use base::nix::fcntl::OFlag;
use base::{
CmdArgs, EarlyExitExt, LoggedResult, MappedFile, PositionalArgParser, ResultExt, Utf8CStr,
Utf8CString, WriteExt, cmdline_logging, cstr, libc::umask, log_err, nix::fcntl::OFlag,
Utf8CString, WriteExt, argh, cmdline_logging, cstr, log_err,
};
use std::ffi::c_char;
use std::io::{Seek, SeekFrom, Write};
@@ -41,9 +43,9 @@ enum Action {
#[derive(FromArgs)]
#[argh(subcommand, name = "unpack")]
struct Unpack {
#[argh(switch, short = 'n')]
#[argh(switch, short = 'n', long = none)]
no_decompress: bool,
#[argh(switch, short = 'h')]
#[argh(switch, short = 'h', long = none)]
dump_header: bool,
#[argh(positional)]
img: Utf8CString,
@@ -52,12 +54,12 @@ struct Unpack {
#[derive(FromArgs)]
#[argh(subcommand, name = "repack")]
struct Repack {
#[argh(switch, short = 'n')]
#[argh(switch, short = 'n', long = none)]
no_compress: bool,
#[argh(positional)]
img: Utf8CString,
#[argh(positional, default = r#"Utf8CString::from("new-boot.img")"#)]
out: Utf8CString,
#[argh(positional)]
out: Option<Utf8CString>,
}
#[derive(FromArgs)]
@@ -75,33 +77,24 @@ struct Sign {
#[argh(positional)]
img: Utf8CString,
#[argh(positional)]
args: Vec<Utf8CString>,
name: Option<Utf8CString>,
#[argh(positional)]
cert: Option<Utf8CString>,
#[argh(positional)]
key: Option<Utf8CString>,
}
#[derive(FromArgs)]
#[argh(subcommand, name = "extract")]
struct Extract {
#[argh(positional)]
payload: Utf8CString,
#[argh(positional)]
partition: Option<Utf8CString>,
#[argh(positional)]
outfile: Option<Utf8CString>,
}
impl FromArgs for Extract {
fn from_args(_command_name: &[&str], args: &[&str]) -> Result<Self, EarlyExit> {
let mut parse = PositionalArgParser(args.iter());
Ok(Extract {
payload: parse.required("payload.bin")?,
partition: parse.optional(),
outfile: parse.last_optional()?,
})
}
}
impl SubCommand for Extract {
const COMMAND: &'static CommandInfo = &CommandInfo {
name: "extract",
description: "",
};
}
#[derive(FromArgs)]
#[argh(subcommand, name = "hexpatch")]
struct HexPatch {
@@ -134,7 +127,7 @@ struct Dtb {
#[derive(FromArgs)]
#[argh(subcommand, name = "split")]
struct Split {
#[argh(switch, short = 'n')]
#[argh(switch, short = 'n', long = none)]
no_decompress: bool,
#[argh(positional)]
file: Utf8CString,
@@ -184,28 +177,15 @@ impl SubCommand for Compress {
};
}
#[derive(FromArgs)]
#[argh(subcommand, name = "decompress")]
struct Decompress {
#[argh(positional)]
file: Utf8CString,
#[argh(positional)]
out: Option<Utf8CString>,
}
impl FromArgs for Decompress {
fn from_args(_command_name: &[&str], args: &[&str]) -> Result<Self, EarlyExit> {
let mut iter = PositionalArgParser(args.iter());
Ok(Decompress {
file: iter.required("infile")?,
out: iter.last_optional()?,
})
}
}
impl SubCommand for Decompress {
const COMMAND: &'static CommandInfo = &CommandInfo {
name: "decompress",
description: "",
};
}
fn print_usage(cmd: &str) {
eprintln!(
r#"MagiskBoot - Boot Image Modification Tool
@@ -384,21 +364,24 @@ fn boot_main(cmds: CmdArgs) -> LoggedResult<i32> {
img,
out,
}) => {
repack(&img, &out, no_compress);
repack(
&img,
out.as_deref().unwrap_or(cstr!("new-boot.img")),
no_compress,
);
}
Action::Verify(Verify { img, cert }) => {
if !verify_cmd(&img, cert.as_deref()) {
return log_err!();
}
}
Action::Sign(Sign { img, args }) => {
let mut iter = args.iter();
sign_cmd(
&img,
iter.next().map(AsRef::as_ref),
iter.next().map(AsRef::as_ref),
iter.next().map(AsRef::as_ref),
)?;
Action::Sign(Sign {
img,
name,
cert,
key,
}) => {
sign_cmd(&img, name.as_deref(), cert.as_deref(), key.as_deref())?;
}
Action::Extract(Extract {
payload,


@@ -1,13 +1,17 @@
use crate::ffi::{FileFormat, check_fmt};
use base::{
Chunker, FileOrStd, LoggedResult, ReadExt, ResultExt, Utf8CStr, Utf8CString, WriteExt, log_err,
nix::fcntl::OFlag,
};
use bzip2::{Compression as BzCompression, read::BzDecoder, write::BzEncoder};
use flate2::{Compression as GzCompression, read::MultiGzDecoder, write::GzEncoder};
use base::nix::fcntl::OFlag;
use base::{Chunker, FileOrStd, LoggedResult, ReadExt, Utf8CStr, Utf8CString, WriteExt, log_err};
use bzip2::Compression as BzCompression;
use bzip2::read::BzDecoder;
use bzip2::write::BzEncoder;
use flate2::Compression as GzCompression;
use flate2::read::MultiGzDecoder;
use flate2::write::GzEncoder;
use lz4::block::CompressionMode;
use lz4::liblz4::BlockChecksum;
use lz4::{
BlockMode, BlockSize, ContentChecksum, Decoder as LZ4FrameDecoder, Encoder as LZ4FrameEncoder,
EncoderBuilder as LZ4FrameEncoderBuilder, block::CompressionMode, liblz4::BlockChecksum,
EncoderBuilder as LZ4FrameEncoderBuilder,
};
use lzma_rust2::{CheckType, LzmaOptions, LzmaReader, LzmaWriter, XzOptions, XzReader, XzWriter};
use std::cmp::min;
@@ -212,16 +216,21 @@ impl<R: Read> Read for LZ4BlockDecoder<R> {
// Top-level APIs
pub fn get_encoder<'a, W: Write + 'a>(format: FileFormat, w: W) -> Box<dyn WriteFinish<W> + 'a> {
match format {
pub fn get_encoder<'a, W: Write + 'a>(
format: FileFormat,
w: W,
) -> std::io::Result<Box<dyn WriteFinish<W> + 'a>> {
Ok(match format {
FileFormat::XZ => {
let mut opt = XzOptions::with_preset(9);
opt.set_check_sum_type(CheckType::Crc32);
Box::new(XzWriter::new(w, opt).unwrap())
}
FileFormat::LZMA => {
Box::new(LzmaWriter::new_use_header(w, &LzmaOptions::with_preset(9), None).unwrap())
Box::new(XzWriter::new(w, opt)?)
}
FileFormat::LZMA => Box::new(LzmaWriter::new_use_header(
w,
&LzmaOptions::with_preset(9),
None,
)?),
FileFormat::BZIP2 => Box::new(BzEncoder::new(w, BzCompression::best())),
FileFormat::LZ4 => {
let encoder = LZ4FrameEncoderBuilder::new()
@@ -231,8 +240,7 @@ pub fn get_encoder<'a, W: Write + 'a>(format: FileFormat, w: W) -> Box<dyn Write
.block_checksum(BlockChecksum::BlockChecksumEnabled)
.level(9)
.auto_flush(true)
.build(w)
.unwrap();
.build(w)?;
Box::new(encoder)
}
FileFormat::LZ4_LEGACY => Box::new(LZ4BlockEncoder::new(w, false)),
@@ -244,23 +252,26 @@ pub fn get_encoder<'a, W: Write + 'a>(format: FileFormat, w: W) -> Box<dyn Write
maximum_block_splits: 1,
..Default::default()
};
Box::new(ZopFliEncoder::new_buffered(opt, BlockType::Dynamic, w).unwrap())
Box::new(ZopFliEncoder::new_buffered(opt, BlockType::Dynamic, w)?)
}
FileFormat::GZIP => Box::new(GzEncoder::new(w, GzCompression::best())),
_ => unreachable!(),
}
})
}
pub fn get_decoder<'a, R: Read + 'a>(format: FileFormat, r: R) -> Box<dyn Read + 'a> {
match format {
pub fn get_decoder<'a, R: Read + 'a>(
format: FileFormat,
r: R,
) -> std::io::Result<Box<dyn Read + 'a>> {
Ok(match format {
FileFormat::XZ => Box::new(XzReader::new(r, true)),
FileFormat::LZMA => Box::new(LzmaReader::new_mem_limit(r, u32::MAX, None).unwrap()),
FileFormat::LZMA => Box::new(LzmaReader::new_mem_limit(r, u32::MAX, None)?),
FileFormat::BZIP2 => Box::new(BzDecoder::new(r)),
FileFormat::LZ4 => Box::new(LZ4FrameDecoder::new(r).unwrap()),
FileFormat::LZ4 => Box::new(LZ4FrameDecoder::new(r)?),
FileFormat::LZ4_LG | FileFormat::LZ4_LEGACY => Box::new(LZ4BlockDecoder::new(r)),
FileFormat::ZOPFLI | FileFormat::GZIP => Box::new(MultiGzDecoder::new(r)),
_ => unreachable!(),
}
})
}
// C++ FFI
@@ -268,9 +279,9 @@ pub fn get_decoder<'a, R: Read + 'a>(format: FileFormat, r: R) -> Box<dyn Read +
pub fn compress_bytes(format: FileFormat, in_bytes: &[u8], out_fd: RawFd) {
let mut out_file = unsafe { ManuallyDrop::new(File::from_raw_fd(out_fd)) };
let mut encoder = get_encoder(format, out_file.deref_mut());
let _: LoggedResult<()> = try {
encoder.write_all(in_bytes)?;
let mut encoder = get_encoder(format, out_file.deref_mut())?;
std::io::copy(&mut Cursor::new(in_bytes), encoder.deref_mut())?;
encoder.finish()?;
};
}
@@ -278,8 +289,10 @@ pub fn compress_bytes(format: FileFormat, in_bytes: &[u8], out_fd: RawFd) {
pub fn decompress_bytes(format: FileFormat, in_bytes: &[u8], out_fd: RawFd) {
let mut out_file = unsafe { ManuallyDrop::new(File::from_raw_fd(out_fd)) };
let mut decoder = get_decoder(format, in_bytes);
std::io::copy(decoder.as_mut(), out_file.deref_mut()).log_ok();
let _: LoggedResult<()> = try {
let mut decoder = get_decoder(format, in_bytes)?;
std::io::copy(decoder.as_mut(), out_file.deref_mut())?;
};
}
// Command-line entry points
@@ -335,7 +348,7 @@ pub(crate) fn decompress_cmd(infile: &Utf8CStr, outfile: Option<&Utf8CStr>) -> L
FileOrStd::File(outfile.create(OFlag::O_WRONLY | OFlag::O_TRUNC, 0o644)?)
};
let mut decoder = get_decoder(format, Cursor::new(buf).chain(input.as_file()));
let mut decoder = get_decoder(format, Cursor::new(buf).chain(input.as_file()))?;
std::io::copy(decoder.as_mut(), &mut output.as_file())?;
if rm_in {
@@ -378,7 +391,7 @@ pub(crate) fn compress_cmd(
FileOrStd::File(outfile)
};
let mut encoder = get_encoder(method, output.as_file());
let mut encoder = get_encoder(method, output.as_file())?;
std::io::copy(&mut input.as_file(), encoder.as_mut())?;
encoder.finish()?;

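The get_encoder/get_decoder change above is the "do not unwrap" fix: the factories now return std::io::Result and propagate codec constructor failures with `?`, so unexpected input surfaces as an error instead of a panic. A self-contained sketch of the shape, with a placeholder type standing in for the real XZ/LZMA/LZ4 codecs:

use std::io::{self, Read};

enum Format {
    Plain,
    Framed,
}

// Placeholder for a decoder whose constructor can fail, like
// LzmaReader::new_mem_limit or LZ4FrameDecoder::new above.
struct FramedReader<R: Read> {
    inner: R,
}

impl<R: Read> FramedReader<R> {
    fn new(inner: R) -> io::Result<Self> {
        Ok(FramedReader { inner })
    }
}

impl<R: Read> Read for FramedReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.inner.read(buf)
    }
}

// Wrap the whole match in Ok(..) and use `?` on fallible constructors,
// so callers handle an io::Result instead of hitting an unwrap panic.
fn get_decoder<'a, R: Read + 'a>(format: Format, r: R) -> io::Result<Box<dyn Read + 'a>> {
    Ok(match format {
        Format::Plain => Box::new(r),
        Format::Framed => Box::new(FramedReader::new(r)?),
    })
}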
View File

@@ -1,5 +1,10 @@
#![allow(clippy::useless_conversion)]
use argh::FromArgs;
use base::argh;
use bytemuck::{Pod, Zeroable, from_bytes};
use num_traits::cast::AsPrimitive;
use size::{Base, Size, Style};
use std::cmp::Ordering;
use std::collections::{BTreeMap, HashMap};
use std::fmt::{Display, Formatter};
@@ -9,11 +14,6 @@ use std::mem::size_of;
use std::process::exit;
use std::str;
use argh::FromArgs;
use bytemuck::{Pod, Zeroable, from_bytes};
use num_traits::cast::AsPrimitive;
use size::{Base, Size, Style};
use crate::check_env;
use crate::compress::{get_decoder, get_encoder};
use crate::ffi::FileFormat;
@@ -23,9 +23,10 @@ use base::libc::{
S_IWOTH, S_IWUSR, S_IXGRP, S_IXOTH, S_IXUSR, dev_t, gid_t, major, makedev, minor, mknod,
mode_t, uid_t,
};
use base::nix::fcntl::OFlag;
use base::{
BytesExt, EarlyExitExt, LoggedResult, MappedFile, OptionExt, ResultExt, Utf8CStr, Utf8CStrBuf,
WriteExt, cstr, log_err, nix::fcntl::OFlag,
WriteExt, cstr, log_err,
};
#[derive(FromArgs)]
@@ -690,8 +691,8 @@ impl CpioEntry {
if self.mode & S_IFMT != S_IFREG {
return false;
}
let mut encoder = get_encoder(FileFormat::XZ, Vec::new());
let Ok(data): std::io::Result<Vec<u8>> = (try {
let mut encoder = get_encoder(FileFormat::XZ, Vec::new())?;
encoder.write_all(&self.data)?;
encoder.finish()?
}) else {
@@ -709,7 +710,7 @@ impl CpioEntry {
}
let Ok(data): std::io::Result<Vec<u8>> = (try {
let mut decoder = get_decoder(FileFormat::XZ, Cursor::new(&self.data));
let mut decoder = get_decoder(FileFormat::XZ, Cursor::new(&self.data))?;
let mut data = Vec::new();
std::io::copy(decoder.as_mut(), &mut data)?;
data

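The CpioEntry hunks keep all of the now-fallible encoder/decoder calls inside a `try` block and bind the result with `let Ok(data) = (try { .. }) else { .. }`. Since try blocks are still a nightly feature, a stable-Rust sketch of the same error-scoping shape uses an immediately invoked closure (illustrative only; the copy below stands in for the XZ codec):

use std::io::{self, Cursor};

fn recompress_sketch(raw: &[u8]) -> Option<Vec<u8>> {
    // All fallible steps live in one scope; any `?` failure falls through
    // to the let-else handling below, mirroring the try-block version.
    let result: io::Result<Vec<u8>> = (|| {
        let mut data = Vec::new();
        io::copy(&mut Cursor::new(raw), &mut data)?; // stand-in for the decoder
        Ok(data)
    })();
    let Ok(data) = result else {
        return None; // the real code logs and leaves the entry unmodified
    };
    Some(data)
}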
View File

@@ -1,14 +1,11 @@
use argh::FromArgs;
use base::{LoggedResult, MappedFile, Utf8CStr, argh};
use fdt::node::{FdtNode, NodeProperty};
use fdt::{Fdt, FdtError};
use std::cell::UnsafeCell;
use argh::FromArgs;
use fdt::{
Fdt, FdtError,
node::{FdtNode, NodeProperty},
};
use base::{LoggedResult, MappedFile, Utf8CStr};
use crate::{check_env, patch::patch_verity};
use crate::check_env;
use crate::patch::patch_verity;
#[derive(FromArgs)]
#[argh(subcommand)]
@@ -21,7 +18,7 @@ pub(crate) enum DtbAction {
#[derive(FromArgs)]
#[argh(subcommand, name = "print")]
pub(crate) struct Print {
#[argh(switch, short = 'f')]
#[argh(switch, short = 'f', long = none)]
fstab: bool,
}

View File

@@ -1,5 +1,4 @@
#![feature(format_args_nl)]
#![feature(btree_extract_if)]
#![feature(iter_intersperse)]
#![feature(try_blocks)]

View File

@@ -1,15 +1,13 @@
use crate::compress::get_decoder;
use crate::ffi::check_fmt;
use crate::proto::update_metadata::{DeltaArchiveManifest, mod_InstallOperation::Type};
use crate::proto::update_metadata::DeltaArchiveManifest;
use crate::proto::update_metadata::mod_InstallOperation::Type;
use base::{LoggedError, LoggedResult, ReadSeekExt, ResultExt, WriteExt, error};
use byteorder::{BigEndian, ReadBytesExt};
use quick_protobuf::{BytesReader, MessageRead};
use std::io::Cursor;
use std::{
fs::File,
io::{BufReader, Read, Seek, SeekFrom, Write},
os::fd::FromRawFd,
};
use std::fs::File;
use std::io::{BufReader, Cursor, Read, Seek, SeekFrom, Write};
use std::os::fd::FromRawFd;
macro_rules! bad_payload {
($msg:literal) => {{
@@ -166,8 +164,8 @@ pub fn extract_boot_from_payload(
out_file.seek(SeekFrom::Start(out_offset))?;
let fmt = check_fmt(data);
let mut decoder = get_decoder(fmt, Cursor::new(data));
let Ok(_): std::io::Result<()> = (try {
let mut decoder = get_decoder(fmt, Cursor::new(data))?;
std::io::copy(decoder.as_mut(), &mut out_file)?;
}) else {
return Err(bad_payload!("decompression failed"));

View File

@@ -8,16 +8,19 @@ crate-type = ["staticlib"]
path = "lib.rs"
[features]
default = ["check-signature"]
default = ["check-signature", "check-client", "su-check-db"]
# Disable these features for easier debugging during development
check-signature = []
check-client = []
su-check-db = []
[build-dependencies]
cxx-gen = { workspace = true }
pb-rs = { workspace = true }
[dependencies]
base = { path = "../base", features = ["selinux"] }
derive = { path = "derive" }
base = { workspace = true, features = ["selinux"] }
cxx = { workspace = true }
num-traits = { workspace = true }
num-derive = { workspace = true }
@@ -25,6 +28,5 @@ quick-protobuf = { workspace = true }
bytemuck = { workspace = true, features = ["derive"] }
thiserror = { workspace = true }
bit-set = { workspace = true }
argh = { workspace = true }
nix = { workspace = true, features = ["fs", "mount", "poll", "signal", "term", "user", "zerocopy"] }
bitflags = { workspace = true }

View File

@@ -75,8 +75,8 @@ impl MagiskD {
let tmp_bb = buf.append_path(get_magisk_tmp()).append_path(BBPATH);
tmp_bb.mkdirs(0o755).ok();
tmp_bb.append_path("busybox");
tmp_bb.follow_link().chmod(0o755).log_ok();
busybox.copy_to(tmp_bb).ok();
tmp_bb.follow_link().chmod(0o755).log_ok();
// Install busybox applets
Command::new(&tmp_bb)
@@ -185,7 +185,9 @@ impl MagiskD {
setup_preinit_dir();
self.ensure_manager();
self.zygisk.lock().unwrap().reset(true);
if self.zygisk_enabled.load(Ordering::Relaxed) {
self.zygisk.lock().unwrap().reset(true);
}
}
pub fn boot_stage_handler(&self, client: UnixStream, code: RequestCode) {

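The busybox hunk above reorders the setup so the chmod on the destination happens after the copy; copying first and then fixing the mode means the copy cannot clobber the freshly set permission bits. A hedged std-only sketch of that ordering (paths and the helper name are illustrative):

use std::fs;
use std::io;
use std::os::unix::fs::PermissionsExt;

fn install_busybox_sketch(src: &str, dst: &str) -> io::Result<()> {
    // Copy first; fs::copy also transfers the source's permission bits.
    fs::copy(src, dst)?;
    // Then force the executable mode on the destination afterwards.
    fs::set_permissions(dst, fs::Permissions::from_mode(0o755))?;
    Ok(())
}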
View File

@@ -1,4 +1,5 @@
use pb_rs::{ConfigBuilder, types::FileDescriptor};
use pb_rs::ConfigBuilder;
use pb_rs::types::FileDescriptor;
use crate::codegen::gen_cxx_binding;

View File

@@ -21,19 +21,17 @@ use base::{
AtomicArc, BufReadExt, FileAttr, FsPathBuilder, LoggedResult, ReadExt, ResultExt, Utf8CStr,
Utf8CStrBuf, WriteExt, cstr, fork_dont_care, info, libc, log_err, set_nice_name,
};
use nix::{
fcntl::OFlag,
mount::MsFlags,
sys::signal::SigSet,
unistd::{dup2_stderr, dup2_stdin, dup2_stdout, getpid, getuid, setsid},
};
use nix::fcntl::OFlag;
use nix::mount::MsFlags;
use nix::sys::signal::SigSet;
use nix::unistd::{dup2_stderr, dup2_stdin, dup2_stdout, getpid, getuid, setsid};
use num_traits::AsPrimitive;
use std::fmt::Write as _;
use std::io::{BufReader, Write};
use std::os::fd::{AsFd, AsRawFd, IntoRawFd, RawFd};
use std::os::unix::net::{UCred, UnixListener, UnixStream};
use std::process::{Command, exit};
use std::sync::atomic::AtomicBool;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Mutex, OnceLock};
use std::time::Duration;
@@ -130,7 +128,9 @@ impl MagiskD {
info!("** zygote restarted");
self.prune_su_access();
scan_deny_apps();
self.zygisk.lock().unwrap().reset(false);
if self.zygisk_enabled.load(Ordering::Relaxed) {
self.zygisk.lock().unwrap().reset(false);
}
}
RequestCode::SQLITE_CMD => {
self.db_exec_for_cli(client).ok();
@@ -159,6 +159,7 @@ impl MagiskD {
.ok();
}
#[cfg(feature = "check-client")]
fn is_client(&self, pid: i32) -> bool {
let mut buf = cstr::buf::new::<32>();
write!(buf, "/proc/{pid}/exe").ok();
@@ -169,6 +170,11 @@ impl MagiskD {
}
}
#[cfg(not(feature = "check-client"))]
fn is_client(&self, pid: i32) -> bool {
true
}
fn handle_requests(&'static self, mut client: UnixStream) {
let Ok(cred) = client.peer_cred() else {
// Client died

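The daemon hunk introduces the check-client feature: the real client check compiles in by default, while a permissive stub takes its place when the feature is disabled for easier debugging. A minimal sketch of that cfg pattern (the body of the real check is elided and the placeholder logic here is illustrative):

// Compiled when the feature is enabled (the default): do the real validation.
#[cfg(feature = "check-client")]
fn is_client(pid: i32) -> bool {
    // The real code inspects /proc/<pid>/exe; treat this as a placeholder.
    pid > 0
}

// Compiled when the feature is disabled: accept every caller.
#[cfg(not(feature = "check-client"))]
fn is_client(_pid: i32) -> bool {
    true
}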
View File

@@ -1,8 +0,0 @@
use proc_macro::TokenStream;
mod decodable;
#[proc_macro_derive(Decodable)]
pub fn derive_decodable(input: TokenStream) -> TokenStream {
decodable::derive_decodable(input)
}

View File

@@ -1,5 +1,4 @@
#![feature(try_blocks)]
#![feature(let_chains)]
#![feature(fn_traits)]
#![feature(unix_socket_ancillary_data)]
#![feature(unix_socket_peek)]
@@ -9,8 +8,8 @@
use crate::ffi::SuRequest;
use crate::socket::Encodable;
use base::derive::Decodable;
use daemon::{MagiskD, connect_daemon_for_cxx};
use derive::Decodable;
use logging::{android_logging, zygisk_close_logd, zygisk_get_logd, zygisk_logging};
use magisk::magisk_main;
use mount::revert_unmount;

View File

@@ -1,17 +1,16 @@
use crate::consts::{LOG_PIPE, LOGFILE};
use crate::ffi::get_magisk_tmp;
use crate::logging::LogFile::{Actual, Buffer};
use base::const_format::concatcp;
use base::{
FsPathBuilder, LogLevel, LoggedResult, ReadExt, Utf8CStr, Utf8CStrBuf, WriteExt,
const_format::concatcp, cstr, libc, new_daemon_thread, raw_cstr, update_logger,
FsPathBuilder, LogLevel, LoggedResult, ReadExt, ResultExt, Utf8CStr, Utf8CStrBuf, WriteExt,
cstr, libc, new_daemon_thread, raw_cstr, update_logger,
};
use bytemuck::{Pod, Zeroable, bytes_of, write_zeroes};
use libc::{PIPE_BUF, c_char, localtime_r, sigtimedwait, time_t, timespec, tm};
use nix::{
fcntl::OFlag,
sys::signal::{SigSet, SigmaskHow, Signal},
unistd::{Gid, Uid, chown, getpid, gettid},
};
use nix::fcntl::OFlag;
use nix::sys::signal::{SigSet, SigmaskHow, Signal};
use nix::unistd::{Gid, Uid, chown, getpid, gettid};
use num_derive::{FromPrimitive, ToPrimitive};
use num_traits::FromPrimitive;
use std::cmp::min;
@@ -328,7 +327,7 @@ pub fn start_log_daemon() {
}
let _: LoggedResult<()> = try {
path.mkfifo(0o666)?;
path.mkfifo(0o666).log_ok();
chown(path.as_utf8_cstr(), Some(Uid::from(0)), Some(Gid::from(0)))?;
let read = path.open(OFlag::O_RDWR | OFlag::O_CLOEXEC)?;
let write = path.open(OFlag::O_WRONLY | OFlag::O_CLOEXEC)?;

View File

@@ -5,7 +5,7 @@ use crate::mount::find_preinit_device;
use crate::selinux::restorecon;
use crate::socket::{Decodable, Encodable};
use argh::FromArgs;
use base::{CmdArgs, EarlyExitExt, LoggedResult, Utf8CString, clone_attr};
use base::{CmdArgs, EarlyExitExt, LoggedResult, Utf8CString, argh, clone_attr};
use nix::poll::{PollFd, PollFlags, PollTimeout};
use std::ffi::c_char;
use std::os::fd::AsFd;

View File

@@ -8,7 +8,9 @@ use base::{
Utf8CStrBuf, Utf8CString, WalkResult, clone_attr, cstr, debug, error, info, libc, raw_cstr,
warn,
};
use nix::{fcntl::OFlag, mount::MsFlags, unistd::UnlinkatFlags};
use nix::fcntl::OFlag;
use nix::mount::MsFlags;
use nix::unistd::UnlinkatFlags;
use std::collections::BTreeMap;
use std::os::fd::IntoRawFd;
use std::path::{Component, Path};

View File

@@ -6,12 +6,11 @@ use base::{
debug, info, libc, parse_mount_info, warn,
};
use libc::{c_uint, dev_t};
use nix::{
mount::MsFlags,
sys::stat::{Mode, SFlag, mknod},
};
use nix::mount::MsFlags;
use nix::sys::stat::{Mode, SFlag, mknod};
use num_traits::AsPrimitive;
use std::{cmp::Ordering::Greater, cmp::Ordering::Less, path::Path, path::PathBuf};
use std::cmp::Ordering::{Greater, Less};
use std::path::{Path, PathBuf};
pub fn setup_preinit_dir() {
let magisk_tmp = get_magisk_tmp();

View File

@@ -1,12 +1,12 @@
use super::{
PropInfo, PropReader, SYS_PROP,
persist::{persist_delete_prop, persist_get_all_props, persist_get_prop, persist_set_prop},
use super::persist::{
persist_delete_prop, persist_get_all_props, persist_get_prop, persist_set_prop,
};
use super::{PropInfo, PropReader, SYS_PROP};
use argh::{EarlyExit, FromArgs, MissingRequirements};
use base::libc::PROP_VALUE_MAX;
use base::{
BufReadExt, CmdArgs, EarlyExitExt, LogLevel, LoggedResult, ResultExt, Utf8CStr, Utf8CStrBuf,
Utf8CString, cstr, debug, log_err, set_log_level_state,
Utf8CString, argh, cstr, debug, log_err, set_log_level_state,
};
use nix::fcntl::OFlag;
use std::collections::BTreeMap;
@@ -17,21 +17,21 @@ use std::io::BufReader;
struct ResetProp {
#[argh(switch, short = 'v')]
verbose: bool,
#[argh(switch, short = 'w')]
#[argh(switch, short = 'w', long = none)]
wait_mode: bool,
#[argh(switch, short = 'p')]
#[argh(switch, short = 'p', long = none)]
persist: bool,
#[argh(switch, short = 'P')]
#[argh(switch, short = 'P', long = none)]
persist_only: bool,
#[argh(switch, short = 'Z')]
#[argh(switch, short = 'Z', long = none)]
context: bool,
#[argh(switch, short = 'n')]
#[argh(switch, short = 'n', long = none)]
skip_svc: bool,
#[argh(option, short = 'f')]
file: Option<Utf8CString>,
#[argh(option, long = "delete", short = 'd')]
#[argh(option, short = 'd', long = "delete")]
delete_key: Option<Utf8CString>,
#[argh(positional)]
#[argh(positional, greedy = true)]
args: Vec<Utf8CString>,
}
@@ -57,7 +57,7 @@ Wait mode arguments (toggled with -w):
General flags:
-h,--help show this message
-v print verbose output to stderr
-v,--verbose print verbose output to stderr
-w switch to wait mode
Read mode flags:

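The resetprop hunks use two of the vendored-argh customizations from this series: `long = none` registers a switch under its short form only (so `-w` exists but `--wait-mode` does not), and `greedy = true` makes the positional swallow everything that follows. A plain-Rust sketch of those two acceptance rules, not the argh derive itself:

#[derive(Default, Debug)]
struct ResetPropSketch {
    verbose: bool,     // -v or --verbose
    wait_mode: bool,   // -w only, like `long = none`
    args: Vec<String>, // greedy positional tail
}

fn parse_sketch(argv: &[&str]) -> ResetPropSketch {
    let mut out = ResetPropSketch::default();
    let mut it = argv.iter();
    while let Some(arg) = it.next() {
        match *arg {
            "-v" | "--verbose" => out.verbose = true,
            "-w" => out.wait_mode = true,
            first => {
                // Greedy: the first non-switch and everything after it are positionals.
                out.args.push(first.to_string());
                out.args.extend(it.by_ref().map(|s| s.to_string()));
            }
        }
    }
    out
}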
View File

@@ -1,20 +1,17 @@
use nix::fcntl::OFlag;
use quick_protobuf::{BytesReader, MessageRead, MessageWrite, Writer};
use std::io::Read;
use std::{
fs::File,
io::{BufWriter, Write},
os::fd::FromRawFd,
};
use std::fs::File;
use std::io::{BufWriter, Read, Write};
use std::os::fd::FromRawFd;
use crate::resetprop::PropReader;
use crate::resetprop::proto::persistent_properties::{
PersistentProperties, mod_PersistentProperties::PersistentPropertyRecord,
};
use crate::resetprop::proto::persistent_properties::PersistentProperties;
use crate::resetprop::proto::persistent_properties::mod_PersistentProperties::PersistentPropertyRecord;
use base::const_format::concatcp;
use base::libc::mkstemp;
use base::{
Directory, FsPathBuilder, LibcReturn, LoggedResult, MappedFile, SilentLogExt, Utf8CStr,
Utf8CStrBuf, WalkResult, clone_attr, cstr, debug, libc::mkstemp, log_err,
Utf8CStrBuf, WalkResult, clone_attr, cstr, debug, log_err,
};
const PERSIST_PROP_DIR: &str = "/data/property";

View File

@@ -8,14 +8,14 @@ use ExtraVal::{Bool, Int, IntList, Str};
use base::{
BytesExt, FileAttr, LibcReturn, LoggedResult, ResultExt, Utf8CStrBuf, cstr, fork_dont_care,
};
use nix::{
fcntl::OFlag,
poll::{PollFd, PollFlags, PollTimeout},
};
use nix::fcntl::OFlag;
use nix::poll::{PollFd, PollFlags, PollTimeout};
use num_traits::AsPrimitive;
use std::fmt::Write;
use std::fs::File;
use std::os::fd::AsFd;
use std::os::unix::net::UCred;
use std::{fmt::Write, fs::File, process::Command, process::exit};
use std::process::{Command, exit};
struct Extra<'a> {
key: &'static str,

View File

@@ -212,6 +212,7 @@ impl MagiskD {
info
}
#[cfg(feature = "su-check-db")]
fn build_su_info(&self, uid: i32) -> Arc<SuInfo> {
let result: LoggedResult<Arc<SuInfo>> = try {
let cfg = self.get_db_settings()?;
@@ -283,4 +284,9 @@ impl MagiskD {
result.unwrap_or(Arc::new(SuInfo::deny(uid)))
}
#[cfg(not(feature = "su-check-db"))]
fn build_su_info(&self, uid: i32) -> Arc<SuInfo> {
Arc::new(SuInfo::allow(uid))
}
}

View File

@@ -1,17 +1,15 @@
use base::{FileOrStd, LibcReturn, LoggedResult, OsResult, ResultExt, libc, warn};
use libc::{STDIN_FILENO, TIOCGWINSZ, TIOCSWINSZ, c_int, winsize};
use nix::{
fcntl::{OFlag, SpliceFFlags},
poll::{PollFd, PollFlags, PollTimeout, poll},
sys::signal::{SigSet, Signal, raise},
sys::signalfd::{SfdFlags, SignalFd},
sys::termios::{SetArg, Termios, cfmakeraw, tcgetattr, tcsetattr},
unistd::pipe2,
};
use nix::fcntl::{OFlag, SpliceFFlags};
use nix::poll::{PollFd, PollFlags, PollTimeout, poll};
use nix::sys::signal::{SigSet, Signal, raise};
use nix::sys::signalfd::{SfdFlags, SignalFd};
use nix::sys::termios::{SetArg, Termios, cfmakeraw, tcgetattr, tcsetattr};
use nix::unistd::pipe2;
use std::fs::File;
use std::io::{Read, Write};
use std::mem::MaybeUninit;
use std::os::fd::{AsFd, AsRawFd, FromRawFd, OwnedFd, RawFd};
use std::os::fd::{AsFd, AsRawFd, FromRawFd, RawFd};
use std::sync::atomic::{AtomicBool, Ordering};
static SHOULD_USE_SPLICE: AtomicBool = AtomicBool::new(true);
@@ -50,21 +48,25 @@ fn pump_via_copy(mut fd_in: &File, mut fd_out: &File) -> LoggedResult<()> {
Ok(())
}
fn pump_via_splice(fd_in: &File, fd_out: &File, pipe: &(OwnedFd, OwnedFd)) -> LoggedResult<()> {
if !SHOULD_USE_SPLICE.load(Ordering::Acquire) {
fn pump_via_splice(fd_in: &File, fd_out: &File, pipe: &(File, File)) -> LoggedResult<()> {
if !SHOULD_USE_SPLICE.load(Ordering::Relaxed) {
return pump_via_copy(fd_in, fd_out);
}
// The pipe capacity is 16 pages by default; just use 65536
let Ok(len) = splice(fd_in, &pipe.1, 65536) else {
// If splice failed, stop using splice and fallback to userspace copy
SHOULD_USE_SPLICE.store(false, Ordering::Release);
SHOULD_USE_SPLICE.store(false, Ordering::Relaxed);
return pump_via_copy(fd_in, fd_out);
};
if len == 0 {
return Ok(());
}
splice(&pipe.0, fd_out, len)?;
if splice(&pipe.0, fd_out, len).is_err() {
// If splice failed, stop using splice and fallback to userspace copy
SHOULD_USE_SPLICE.store(false, Ordering::Relaxed);
return pump_via_copy(&pipe.0, fd_out);
}
Ok(())
}
@@ -128,6 +130,7 @@ fn pump_tty_impl(ptmx: File, pump_stdin: bool) -> LoggedResult<()> {
// Open a pipe to bypass userspace copy with splice
let pipe_fd = pipe2(OFlag::O_CLOEXEC).into_os_result("pipe2", None, None)?;
let pipe_fd = (File::from(pipe_fd.0), File::from(pipe_fd.1));
'poll: loop {
// Wait for event
@@ -138,7 +141,7 @@ fn pump_tty_impl(ptmx: File, pump_stdin: bool) -> LoggedResult<()> {
if raw_fd == STDIN_FILENO {
pump_via_splice(FileOrStd::StdIn.as_file(), &ptmx, &pipe_fd)?;
} else if raw_fd == raw_ptmx {
pump_via_splice(&ptmx, FileOrStd::StdIn.as_file(), &pipe_fd)?;
pump_via_splice(&ptmx, FileOrStd::StdOut.as_file(), &pipe_fd)?;
} else if raw_fd == raw_sig {
sync_winsize(raw_ptmx);
signal_fd.as_ref().unwrap().read_signal()?;

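The pty hunks harden the splice path: the pipe ends become plain File handles, the flag uses Relaxed ordering, the ptmx output stream is pumped to stdout rather than stdin, and a failure of the second splice also latches the fallback. A hedged sketch of the latch-and-fallback pattern itself, with a stand-in for the splice(2) wrapper:

use std::fs::File;
use std::io::{self, Read, Write};
use std::sync::atomic::{AtomicBool, Ordering};

static USE_SPLICE: AtomicBool = AtomicBool::new(true);

// Stand-in for the splice(2) wrapper; the real call moves data inside the kernel.
fn splice_sketch(_src: &File, _dst: &File, _len: usize) -> io::Result<usize> {
    Err(io::Error::from(io::ErrorKind::Unsupported))
}

fn pump_via_copy(mut src: &File, mut dst: &File) -> io::Result<()> {
    let mut buf = [0u8; 4096];
    let n = src.read(&mut buf)?;
    dst.write_all(&buf[..n])
}

fn pump(src: &File, dst: &File) -> io::Result<()> {
    if !USE_SPLICE.load(Ordering::Relaxed) {
        return pump_via_copy(src, dst);
    }
    if splice_sketch(src, dst, 65536).is_err() {
        // Once splice fails, latch the flag and never try it again.
        USE_SPLICE.store(false, Ordering::Relaxed);
        return pump_via_copy(src, dst);
    }
    Ok(())
}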
View File

@@ -234,14 +234,11 @@ int su_client_main(int argc, char *argv[]) {
if (atty) {
// We need a PTY. Get one.
write_int(fd, 1);
int ptmx = recv_fd(fd);
setup_sighandlers(sighandler);
// If stdin is not a tty and we still pump to ptmx, our process may intercept input meant
// for ptmx and write it to stdout, which causes the target process to lose input.
pump_tty(ptmx, atty & ATTY_IN);
} else {
write_int(fd, 0);
}
// Get the exit code
@@ -335,9 +332,10 @@ void exec_root_shell(int client, int pid, SuRequest &req, MntNsMode mode) {
int infd = recv_fd(client);
int outfd = recv_fd(client);
int errfd = recv_fd(client);
int ptsfd = -1;
// App needs a PTY
if (read_int(client)) {
if (infd < 0 || outfd < 0 || errfd < 0) {
string pts;
string ptmx;
auto magiskpts = get_magisk_tmp() + "/"s SHELLPTS;
@@ -370,24 +368,18 @@ void exec_root_shell(int client, int pid, SuRequest &req, MntNsMode mode) {
// Opening the TTY has to occur after the
// fork() and setsid() so that it becomes
// our controlling TTY and not the daemon's
int ptsfd = xopen(pts_slave.data(), O_RDWR);
if (infd < 0)
infd = ptsfd;
if (outfd < 0)
outfd = ptsfd;
if (errfd < 0)
errfd = ptsfd;
ptsfd = xopen(pts_slave.data(), O_RDWR);
}
// Swap out stdin, stdout, stderr
xdup2(infd, STDIN_FILENO);
xdup2(outfd, STDOUT_FILENO);
xdup2(errfd, STDERR_FILENO);
xdup2(infd < 0 ? ptsfd : infd, STDIN_FILENO);
xdup2(outfd < 0 ? ptsfd : outfd, STDOUT_FILENO);
xdup2(errfd < 0 ? ptsfd : errfd, STDERR_FILENO);
close(infd);
close(outfd);
close(errfd);
close(ptsfd);
close(client);
// Handle namespaces

View File

@@ -1,5 +1,6 @@
use base::{ResultExt, new_daemon_thread};
use nix::{sys::signal::SigSet, unistd::getpid, unistd::gettid};
use nix::sys::signal::SigSet;
use nix::unistd::{getpid, gettid};
use std::sync::{Condvar, LazyLock, Mutex, WaitTimeoutResult};
use std::time::Duration;

View File

@@ -208,6 +208,12 @@ bool ZygiskContext::plt_hook_commit() {
{
mutex_guard lock(hook_info_lock);
plt_hook_process_regex();
for (auto& reg: register_info) {
regfree(&reg.regex);
}
for (auto& ign: ignore_info) {
regfree(&ign.regex);
}
register_info.clear();
ignore_info.clear();
}

View File

@@ -11,7 +11,7 @@ path = "lib.rs"
cxx-gen = { workspace = true }
[dependencies]
base = { path = "../base" }
magiskpolicy = { path = "../sepolicy", default-features = false }
base = { workspace = true }
magiskpolicy = { workspace = true, features = ["no-main"] }
cxx = { workspace = true }
num-traits = { workspace = true }

View File

@@ -1,19 +1,11 @@
use crate::ffi::backup_init;
use crate::ffi::{BootConfig, MagiskInit, backup_init, magisk_proxy_main};
use crate::logging::setup_klog;
use crate::mount::is_rootfs;
use crate::twostage::hexpatch_init_for_second_stage;
use crate::{
ffi::{BootConfig, MagiskInit, magisk_proxy_main},
logging::setup_klog,
};
use base::{
LibcReturn, LoggedResult, ResultExt, cstr, info,
libc::{basename, getpid, mount, umask},
raw_cstr,
};
use std::{
ffi::{CStr, c_char},
ptr::null,
};
use base::libc::{basename, getpid, mount, umask};
use base::{LibcReturn, LoggedResult, ResultExt, cstr, info, raw_cstr};
use std::ffi::{CStr, c_char};
use std::ptr::null;
impl MagiskInit {
fn new(argv: *mut *mut c_char) -> Self {

View File

@@ -4,17 +4,13 @@ use base::{
nix, parse_mount_info, raw_cstr,
};
use cxx::CxxString;
use nix::{
mount::MsFlags,
sys::statfs::{FsType, TMPFS_MAGIC, statfs},
unistd::{chdir, chroot},
};
use nix::mount::MsFlags;
use nix::sys::statfs::{FsType, TMPFS_MAGIC, statfs};
use nix::unistd::{chdir, chroot};
use num_traits::AsPrimitive;
use std::{
collections::BTreeSet,
ops::Bound::{Excluded, Unbounded},
pin::Pin,
};
use std::collections::BTreeSet;
use std::ops::Bound::{Excluded, Unbounded};
use std::pin::Pin;
unsafe extern "C" {
static environ: *const *mut libc::c_char;

View File

@@ -5,13 +5,10 @@ use base::{
BufReadExt, Directory, FsPathBuilder, LoggedResult, ResultExt, Utf8CStr, Utf8CString,
clone_attr, cstr, debug,
};
use std::io::BufReader;
use std::{
fs::File,
io::Write,
mem,
os::fd::{FromRawFd, RawFd},
};
use std::fs::File;
use std::io::{BufReader, Write};
use std::mem;
use std::os::fd::{FromRawFd, RawFd};
pub fn inject_magisk_rc(fd: RawFd, tmp_dir: &Utf8CStr) {
debug!("Injecting magisk rc");

native/src/rustfmt.toml (new file)
View File

@@ -0,0 +1,2 @@
unstable_features = true
imports_granularity = "Module"

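The new native/src/rustfmt.toml explains the sweeping import churn throughout this compare: imports_granularity = "Module" makes rustfmt split nested use trees so that each module path gets its own use statement. A small before/after example in the style of the payload.rs hunk above:

// Before (one nested tree spanning several modules):
//
//     use std::{fs::File, io::{BufReader, Read, Seek}, os::fd::FromRawFd};
//
// After rustfmt with imports_granularity = "Module":
use std::fs::File;
use std::io::{BufReader, Read, Seek};
use std::os::fd::FromRawFd;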
View File

@@ -11,10 +11,8 @@ path = "lib.rs"
cxx-gen = { workspace = true }
[features]
default = ["main"]
main = []
no-main = []
[dependencies]
base = { path = "../base" }
base = { workspace = true }
cxx = { workspace = true }
argh = { workspace = true }

View File

@@ -1,9 +1,10 @@
use crate::ffi::SePolicy;
use crate::statement::format_statement_help;
use argh::FromArgs;
use base::libc::umask;
use base::{
CmdArgs, EarlyExitExt, FmtAdaptor, LoggedResult, Utf8CString, cmdline_logging, cstr,
libc::umask, log_err,
CmdArgs, EarlyExitExt, FmtAdaptor, LoggedResult, Utf8CString, argh, cmdline_logging, cstr,
log_err,
};
use std::ffi::c_char;
use std::io::stderr;

View File

@@ -8,7 +8,7 @@ use crate::ffi::SePolicy;
#[path = "../include/consts.rs"]
mod consts;
#[cfg(feature = "main")]
#[cfg(not(feature = "no-main"))]
mod cli;
mod rules;
mod statement;

View File

@@ -7,6 +7,7 @@
#include <cil/cil.h>
#include <base.hpp>
#include <flags.h>
using namespace std;

View File

@@ -1,5 +1,6 @@
use crate::SePolicy;
use crate::consts::{SEPOL_FILE_TYPE, SEPOL_LOG_TYPE, SEPOL_PROC_DOMAIN};
use crate::{SePolicy, ffi::Xperm};
use crate::ffi::Xperm;
use base::{LogLevel, set_log_level_state};
macro_rules! rules {

View File

@@ -1,10 +1,12 @@
use std::fmt::{Display, Formatter, Write};
use std::io::{BufRead, BufReader, Cursor};
use std::{iter::Peekable, vec::IntoIter};
use std::iter::Peekable;
use std::vec::IntoIter;
use crate::SePolicy;
use crate::ffi::Xperm;
use base::{BufReadExt, LoggedResult, Utf8CStr, error, nix::fcntl::OFlag, warn};
use base::nix::fcntl::OFlag;
use base::{BufReadExt, LoggedResult, Utf8CStr, error, warn};
pub enum Token<'a> {
AL,

View File

@@ -32,17 +32,17 @@ case $(uname -m) in
esac
cleanup() {
pkill -INT -P $$
wait
trap - EXIT
rm -f magisk_*.img
"$avd" delete avd -n test
exit 1
}
test_error() {
trap - EXIT
print_error "! An error occurred"
pkill -INT -P $$
wait
cleanup
exit 1
}
wait_for_boot() {
@@ -72,13 +72,14 @@ wait_emu() {
dump_vars() {
local val
for name in $@; do
for name in $@ emu_args; do
eval val=\$$name
echo $name=\"$val\"\;
done
}
resolve_vars() {
set +x
local arg_list="$1"
local ver=$2
local type=$3
@@ -86,7 +87,7 @@ resolve_vars() {
# Determine API level
local api
case $ver in
+([0-9])) api=$ver ;;
+([0-9\.])) api=$ver ;;
TiramisuPrivacySandbox) api=33 ;;
UpsideDownCakePrivacySandbox) api=34 ;;
VanillaIceCream) api=35 ;;
@@ -100,10 +101,10 @@ resolve_vars() {
# Determine default image type
if [ -z $type ]; then
if [ $api -ge $atd_min_api -a $api -le $atd_max_api ]; then
if [ $(bc <<< "$api >= $atd_min_api && $api <= $atd_max_api") = 1 ]; then
# Use the lightweight ATD images if possible
type='aosp_atd'
elif [ $api -gt $atd_max_api ]; then
elif [ $(bc <<< "$api > $atd_max_api") = 1 ]; then
# Preview/beta release, no AOSP version available
type='google_apis'
else
@@ -113,7 +114,7 @@ resolve_vars() {
# Old Linux kernels will not boot with memory larger than 3GB
local memory
if [ $api -lt $huge_ram_min_api ]; then
if [ $(bc <<< "$api < $huge_ram_min_api") = 1 ]; then
memory=3072
else
memory=8192
@@ -138,8 +139,14 @@ dl_emu() {
setup_emu() {
local avd_pkg=$1
local ver=$2
dl_emu $avd_pkg
echo no | "$avd" create avd -f -n test -k $avd_pkg
# avdmanager is outdated; it might not set the proper target
local ini=$ANDROID_AVD_HOME/test.ini
sed "s:^target\s*=.*:target=android-$ver:g" $ini > $ini.new
mv $ini.new $ini
}
test_emu() {
@@ -169,15 +176,15 @@ test_emu() {
}
test_main() {
local avd_pkg ramdisk
eval $(resolve_vars "emu_args avd_pkg ramdisk" $1 $2)
local ver avd_pkg ramdisk
eval $(resolve_vars "ver avd_pkg ramdisk" $1 $2)
# Specify an explicit port so that tests can run with other emulators running at the same time
local emu_port=5682
emu_args="$emu_args -port $emu_port"
export ANDROID_SERIAL="emulator-$emu_port"
setup_emu "$avd_pkg"
setup_emu "$avd_pkg" $ver
# Restart ADB daemon just in case
adb kill-server
@@ -210,17 +217,16 @@ test_main() {
test_emu release
fi
# Cleanup
rm -f magisk_*.img
"$avd" delete avd -n test
cleanup
}
run_main() {
local avd_pkg
eval $(resolve_vars "emu_args avd_pkg" $1 $2)
setup_emu "$avd_pkg"
local ver avd_pkg
eval $(resolve_vars "ver avd_pkg" $1 $2)
setup_emu "$avd_pkg" $ver
print_title "* Launching $avd_pkg"
"$emu" @test $emu_args 2>/dev/null
cleanup
}
dl_main() {

View File

@@ -36,25 +36,6 @@ disable_version_config() {
sed -i "s:^version=:# version=:g" $CONFIG
}
bump_canary_version() {
# Update version code
local code=$(grep_prop magisk.versionCode $GCONFIG)
code=$((code + 1))
local tag="canary-$code"
sed -i "s:versionCode=.*:versionCode=${code}:g" $GCONFIG
# Commit version code changes
git add -u .
git status
git commit -m "Release new canary build" -m "[skip ci]"
git tag $tag
# Update version name
local ver=$(git rev-parse --short=8 HEAD)
sed -i "s:version=.*:version=${ver}:g" $CONFIG
sed -i "1s:.*:## Magisk (${ver}) (${code}):" $NOTES
}
# $1 = ver
set_version() {
local ver=$1
@@ -69,89 +50,45 @@ set_version() {
git add -u .
git status
git commit -m "Release Magisk v$ver" -m "[skip ci]"
git tag $tag
}
build_apk() {
# $1 = ver
build() {
[ -z $1 ] && exit 1
local ver=$1
git pull
set_version $ver
$BUILDCMD clean
$BUILDCMD all
$BUILDCMD -r all
}
build_canary() {
bump_canary_version
build_apk
}
# $1 = ver
build_public() {
[ -z $1 ] && exit 1
local ver=$1
set_version $ver
build_apk
}
upload() {
# Verify pattern
[[ "$1" =~ canary|beta|stable ]]
local type=$1
gh auth status
local latest_tag=$(git describe --abbrev=0 --tags)
local ver=$(grep_prop version $CONFIG)
local code=$(grep_prop magisk.versionCode $GCONFIG)
local out=$(grep_prop outdir $CONFIG)
local tag title
local ver=$(echo - | awk "{ print $code / 1000 }")
local tag="v$ver"
local title="Magisk v$ver"
local out=$(grep_prop outdir $CONFIG)
if [ -z $out ]; then
out=out
fi
git tag $tag
git push origin master
git push --tags
# Prepare release notes
tail -n +3 $NOTES > release.md
case $type in
canary )
tag="canary-$code"
title="Magisk ($ver) ($code)"
# Publish release
local release_apk="Magisk-v${ver}.apk"
cp $out/app-release.apk $release_apk
gh release create --verify-tag $tag -p -t "$title" -F release.md $release_apk $out/app-debug.apk $NOTES
# Assert tag format
[ $latest_tag = $tag ]
# Publish release
gh release create --verify-tag $tag -p -t "$title" -F release.md $out/app-release.apk $out/app-debug.apk $NOTES
;;
beta|stable )
tag="v$ver"
title="Magisk v$ver"
# Assert tag format
[ $latest_tag = $tag ]
# Publish release
local release_apk="Magisk-v${ver}.apk"
cp $out/app-release.apk $release_apk
gh release create --verify-tag $tag -p -t "$title" -F release.md $release_apk $out/app-debug.apk $NOTES
rm -f $release_apk
;;
esac
# If publishing stable, make it not prerelease and explicitly latest
if [ $type = "stable" ]; then
gh release edit $tag --prerelease=false --latest
fi
rm -f release.md
}
revert() {
local latest_tag=$(git describe --abbrev=0 --tags)
git tag -d $latest_tag
git reset --hard HEAD~
rm -f $release_apk release.md
}
# Use GNU sed on macOS
@@ -160,14 +97,10 @@ if command -v gsed >/dev/null; then
export -f sed
fi
git pull
trap disable_version_config EXIT
ensure_config
case $1 in
canary ) build_canary ;;
public ) build_public $2 ;;
upload ) upload $2 ;;
revert ) revert ;;
build ) build $2 ;;
upload ) upload ;;
* ) exit 1 ;;
esac

View File

@@ -24,7 +24,7 @@ print_title() {
}
print_error() {
echo -e "\n\033[41;39m${1}\033[0m\n"
echo -e "\n\033[41;39m${1}\033[0m\n" >&2
}
# $1 = TestClass#method