Mirror of https://github.com/topjohnwu/Magisk.git (synced 2026-01-23 01:39:07 -08:00)
Compare commits
49 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 92f110385a | |
| | 0919db6b11 | |
| | 18c1347bd3 | |
| | 0bbc736051 | |
| | 01cb75eaef | |
| | 33eaa7c5eb | |
| | 8734423cb0 | |
| | da9e72b2a2 | |
| | ff4ca74cfe | |
| | 200665c48a | |
| | dd42aa99ea | |
| | 0936cdb192 | |
| | 871643dce2 | |
| | a510554b21 | |
| | 9cc830c565 | |
| | ddbac50645 | |
| | b5138a4af0 | |
| | 64752f38e8 | |
| | 9ac4b5ce7d | |
| | 505053f9b4 | |
| | ccb264f33a | |
| | 84f7d75d30 | |
| | 9a776c22d9 | |
| | 363566d0d5 | |
| | d9dc459bf4 | |
| | 5d6b703622 | |
| | f7ce9c38e1 | |
| | bdbfb40383 | |
| | 283fc0f46f | |
| | 2c24a41bf2 | |
| | 97c93a1f4d | |
| | 8d534e6de8 | |
| | 3a60ef2039 | |
| | 52d7eff03f | |
| | 020e23ea13 | |
| | 1599bfc2c5 | |
| | c8d51b38ba | |
| | f741a4aeb8 | |
| | 4ee2235961 | |
| | 536e50c6e0 | |
| | 57d9fc6099 | |
| | 52d8910bdd | |
| | c94bd49a89 | |
| | b72ba6759e | |
| | 5bcb55b7fc | |
| | 0dc8231585 | |
| | 470acc93c9 | |
| | 0edb80b10f | |
| | bcc6296d94 | |
.github/workflows/build.yml (vendored, 2 lines changed)

@@ -82,7 +82,7 @@ jobs:
strategy:
fail-fast: false
matrix:
version: [23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, "CANARY"]
version: [23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 36.1, "CANARY"]
type: [""]
include:
- version: "CANARY"
app/.gitignore (vendored, 2 lines changed)

@@ -3,5 +3,5 @@
# Gradle
.gradle
.kotlin
build
/local.properties
/build
app/apk/.gitignore (vendored, 1 line changed)

@@ -1 +0,0 @@
/build
@@ -1,8 +1,7 @@
plugins {
id("com.android.application")
kotlin("android")
kotlin("plugin.parcelize")
kotlin("kapt")
id("com.android.legacy-kapt")
id("androidx.navigation.safeargs.kotlin")
}

@@ -26,6 +25,10 @@ android {
isCoreLibraryDesugaringEnabled = true
}

defaultConfig {
proguardFile("proguard-rules.pro")
}

buildTypes {
release {
isMinifyEnabled = true
app/apk/proguard-rules.pro (vendored, new file, 3 lines)

@@ -0,0 +1,3 @@
# Excessive obfuscation
-flattenpackagehierarchy
-allowaccessmodification
@@ -27,10 +27,8 @@
isEnabled="@{!item.removed && item.enabled && !item.showNotice}"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:clickable="@{!item.removed && item.enabled && !item.showNotice}"
android:focusable="@{!item.removed && item.enabled && !item.showNotice}"
android:nextFocusRight="@id/module_indicator"
android:onClick="@{() -> item.setEnabled(!item.enabled)}"
app:cardBackgroundColor="@color/color_card_background_color_selector"
tools:isEnabled="false"
tools:layout_gravity="center"
app/buildSrc/.gitignore (vendored, 1 line changed)

@@ -1 +0,0 @@
/build
@@ -1,5 +1,3 @@
import org.jetbrains.kotlin.gradle.dsl.KotlinVersion

plugins {
`kotlin-dsl`
}

@@ -21,6 +19,7 @@ gradlePlugin {
dependencies {
implementation(kotlin("gradle-plugin", libs.versions.kotlin.get()))
implementation(libs.android.gradle.plugin)
implementation(libs.android.kapt.plugin)
implementation(libs.ksp.plugin)
implementation(libs.navigation.safe.args.plugin)
implementation(libs.lsparanoid.plugin)
@@ -46,11 +46,25 @@ class MagiskPlugin : Plugin<Project> {
private fun Project.applyPlugin() {
initRandom(rootProject.file("dict.txt"))
props.clear()
rootProject.file("gradle.properties").inputStream().use { props.load(it) }

// Get gradle properties relevant to Magisk
props.putAll(properties.filter { (key, _) -> key.startsWith("magisk.") })

// Load config.prop
val configPath: String? by this
val config = rootFile(configPath ?: "config.prop")
if (config.exists())
config.inputStream().use { props.load(it) }
val configFile = rootFile(configPath ?: "config.prop")
if (configFile.exists()) {
configFile.inputStream().use {
val config = Properties()
config.load(it)
// Remove properties that should be passed by commandline
config.remove("abiList")
props.putAll(config)
}
}

// Commandline override
findProperty("abiList")?.let { props.put("abiList", it) }

val repo = FileRepository(rootFile(".git"))
val refId = repo.refDatabase.exactRef("HEAD").objectId
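The new side of the hunk above reads config.prop into its own Properties object, strips keys that must only come from the command line (abiList), merges the rest into the shared props, and then lets a -PabiList override win. A small self-contained sketch of that precedence order; `mergeBuildProps` and the file name are illustrative helpers, not the plugin's actual API:

```kotlin
import java.io.File
import java.util.Properties

// Sketch: config file (minus reserved keys) < command-line override.
fun mergeBuildProps(configFile: File, cmdLineAbiList: String?): Properties {
    val props = Properties()

    if (configFile.exists()) {
        configFile.inputStream().use { stream ->
            val config = Properties()
            config.load(stream)
            // Reserved keys must only come from the command line.
            config.remove("abiList")
            props.putAll(config)
        }
    }

    // A -PabiList style override wins over anything in the config file.
    cmdLineAbiList?.let { props.setProperty("abiList", it) }
    return props
}

fun main() {
    val props = mergeBuildProps(File("config.prop"), cmdLineAbiList = "arm64-v8a")
    println(props.getProperty("abiList")) // arm64-v8a
}
```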
@@ -1,72 +1,68 @@
import com.android.build.api.artifact.SingleArtifact
import com.android.build.api.dsl.ApplicationExtension
import com.android.build.api.dsl.CommonExtension
import com.android.build.api.instrumentation.FramesComputationMode.COMPUTE_FRAMES_FOR_INSTRUMENTED_METHODS
import com.android.build.api.instrumentation.InstrumentationScope
import com.android.build.api.variant.AndroidComponentsExtension
import com.android.build.api.variant.ApplicationAndroidComponentsExtension
import com.android.build.gradle.BaseExtension
import com.android.build.gradle.LibraryExtension
import com.android.build.gradle.internal.dsl.BaseAppModuleExtension
import org.apache.tools.ant.filters.FixCrLfFilter
import org.gradle.api.Action
import org.gradle.api.JavaVersion
import org.gradle.api.Project
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.Delete
import org.gradle.api.file.DirectoryProperty
import org.gradle.api.tasks.OutputDirectory
import org.gradle.api.tasks.StopExecutionException
import org.gradle.api.tasks.Sync
import org.gradle.kotlin.dsl.assign
import org.gradle.kotlin.dsl.exclude
import org.gradle.kotlin.dsl.filter
import org.gradle.kotlin.dsl.get
import org.gradle.kotlin.dsl.getValue
import org.gradle.kotlin.dsl.named
import org.gradle.kotlin.dsl.provideDelegate
import org.gradle.kotlin.dsl.register
import org.gradle.kotlin.dsl.withType
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
import java.io.ByteArrayOutputStream
import java.io.File
import java.net.URI
import java.security.MessageDigest
import java.util.HexFormat
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.ZipEntry
import java.util.zip.ZipFile
import java.util.zip.ZipOutputStream

private fun Project.androidBase(configure: Action<BaseExtension>) =
private fun Project.android(configure: Action<CommonExtension>) =
extensions.configure("android", configure)

private fun Project.android(configure: Action<BaseAppModuleExtension>) =
private fun Project.androidApp(configure: Action<ApplicationExtension>) =
extensions.configure("android", configure)

internal val Project.androidApp: BaseAppModuleExtension
get() = extensions["android"] as BaseAppModuleExtension
internal val Project.androidApp: ApplicationExtension
get() = extensions["android"] as ApplicationExtension

private val Project.androidLib: LibraryExtension
get() = extensions["android"] as LibraryExtension
private fun Project.androidComponents(configure: Action<AndroidComponentsExtension<*, *, *>>) =
extensions.configure(AndroidComponentsExtension::class.java, configure)

internal val Project.androidComponents
get() = extensions.getByType(ApplicationAndroidComponentsExtension::class.java)
private val Project.androidComponents: AndroidComponentsExtension<*, *, *>
get() = extensions["androidComponents"] as AndroidComponentsExtension<*, *, *>

internal fun Project.androidAppComponents(configure: Action<ApplicationAndroidComponentsExtension>) =
extensions.configure(ApplicationAndroidComponentsExtension::class.java, configure)

fun Project.setupCommon() {
androidBase {
compileSdkVersion(36)
android {
compileSdk {
version = release(36)
}
buildToolsVersion = "36.0.0"
ndkPath = "$sdkDirectory/ndk/magisk"
ndkVersion = "29.0.13846066"
ndkPath = "${androidComponents.sdkComponents.sdkDirectory.get().asFile}/ndk/magisk"
ndkVersion = "29.0.14206865"

defaultConfig {
defaultConfig.apply {
minSdk = 23
}

compileOptions {
compileOptions.apply {
sourceCompatibility = JavaVersion.VERSION_21
targetCompatibility = JavaVersion.VERSION_21
}

packagingOptions {
packaging.apply {
resources {
excludes += arrayOf(
"/META-INF/*",
@@ -123,93 +119,108 @@ const val BUSYBOX_DOWNLOAD_URL =
const val BUSYBOX_ZIP_CHECKSUM =
"b4d0551feabaf314e53c79316c980e8f66432e9fb91a69dbbf10a93564b40951"

private abstract class SyncWithDir : Sync() {
@get:OutputDirectory
abstract val outputFolder: DirectoryProperty
}

fun Project.setupCoreLib() {
setupCommon()

androidLib.libraryVariants.all {
val variant = name
val variantCapped = name.replaceFirstChar { it.uppercase() }
val abiList = Config.abiList
val abiList = Config.abiList

val syncLibs = tasks.register("sync${variantCapped}JniLibs", Sync::class) {
into("src/$variant/jniLibs")
for (abi in abiList) {
into(abi) {
from(rootFile("native/out/$abi")) {
include("magiskboot", "magiskinit", "magiskpolicy", "magisk", "libinit-ld.so")
rename { if (it.endsWith(".so")) it else "lib$it.so" }
androidComponents {
onVariants { variant ->
val variantName = variant.name
val variantCapped = variantName.replaceFirstChar { it.uppercase() }

val syncLibs = tasks.register("sync${variantCapped}JniLibs", SyncWithDir::class) {
outputFolder.set(layout.buildDirectory.dir("$variantName/jniLibs"))
into(outputFolder)

for (abi in abiList) {
into(abi) {
from(rootFile("native/out/$abi")) {
include("magiskboot", "magiskinit", "magiskpolicy", "magisk", "libinit-ld.so")
rename { if (it.endsWith(".so")) it else "lib$it.so" }
}
}
}
from(zipTree(downloadFile(BUSYBOX_DOWNLOAD_URL, BUSYBOX_ZIP_CHECKSUM)))
include(abiList.map { "$it/libbusybox.so" })
onlyIf {
if (inputs.sourceFiles.files.size != abiList.size * 6)
throw StopExecutionException("Please build binaries first! (./build.py binary)")
true
}
}

variant.sources.jniLibs?.let {
it.addGeneratedSourceDirectory(syncLibs, SyncWithDir::outputFolder)
}

val syncResources = tasks.register("sync${variantCapped}Resources", SyncWithDir::class) {
outputFolder.set(layout.buildDirectory.dir("$variantName/resources"))
into(outputFolder)

into("META-INF/com/google/android") {
from(rootFile("scripts/update_binary.sh")) {
rename { "update-binary" }
}
from(rootFile("scripts/flash_script.sh")) {
rename { "updater-script" }
}
}
}
from(zipTree(downloadFile(BUSYBOX_DOWNLOAD_URL, BUSYBOX_ZIP_CHECKSUM)))
include(abiList.map { "$it/libbusybox.so" })
onlyIf {
if (inputs.sourceFiles.files.size != abiList.size * 6)
throw StopExecutionException("Please build binaries first! (./build.py binary)")
true

variant.sources.resources?.let {
it.addGeneratedSourceDirectory(syncResources, SyncWithDir::outputFolder)
}
}

tasks.getByPath("merge${variantCapped}JniLibFolders").dependsOn(syncLibs)
val stubTask = tasks.getByPath(":stub:comment$variantCapped")
val syncAssets = tasks.register("sync${variantCapped}Assets", SyncWithDir::class) {
outputFolder.set(layout.buildDirectory.dir("$variantName/assets"))
into(outputFolder)

val syncResources = tasks.register("sync${variantCapped}Resources", Sync::class) {
into("src/$variant/resources/META-INF/com/google/android")
from(rootFile("scripts/update_binary.sh")) {
rename { "update-binary" }
}
from(rootFile("scripts/flash_script.sh")) {
rename { "updater-script" }
}
}

processJavaResourcesProvider.configure { dependsOn(syncResources) }

val stubTask = tasks.getByPath(":stub:comment$variantCapped")
val stubApk = stubTask.outputs.files.asFileTree.filter {
it.name.endsWith(".apk")
}

val syncAssets = tasks.register("sync${variantCapped}Assets", Sync::class) {
dependsOn(stubTask)
inputs.property("version", Config.version)
inputs.property("versionCode", Config.versionCode)
into("src/$variant/assets")
from(rootFile("scripts")) {
include("util_functions.sh", "boot_patch.sh", "addon.d.sh",
"app_functions.sh", "uninstaller.sh", "module_installer.sh")
}
from(rootFile("tools/bootctl"))
into("chromeos") {
from(rootFile("tools/futility"))
from(rootFile("tools/keys")) {
include("kernel_data_key.vbprivk", "kernel.keyblock")
inputs.property("version", Config.version)
inputs.property("versionCode", Config.versionCode)
from(rootFile("scripts")) {
include("util_functions.sh", "boot_patch.sh", "addon.d.sh",
"app_functions.sh", "uninstaller.sh", "module_installer.sh")
}
from(rootFile("tools/bootctl"))
into("chromeos") {
from(rootFile("tools/futility"))
from(rootFile("tools/keys")) {
include("kernel_data_key.vbprivk", "kernel.keyblock")
}
}
from(stubTask) {
include { it.name.endsWith(".apk") }
rename { "stub.apk" }
}
filesMatching("**/util_functions.sh") {
filter {
it.replace(
"#MAGISK_VERSION_STUB",
"MAGISK_VER='${Config.version}'\nMAGISK_VER_CODE=${Config.versionCode}"
)
}
filter<FixCrLfFilter>("eol" to FixCrLfFilter.CrLf.newInstance("lf"))
}
}
from(stubApk) {
rename { "stub.apk" }
}
filesMatching("**/util_functions.sh") {
filter {
it.replace(
"#MAGISK_VERSION_STUB",
"MAGISK_VER='${Config.version}'\nMAGISK_VER_CODE=${Config.versionCode}"
)
}
filter<FixCrLfFilter>("eol" to FixCrLfFilter.CrLf.newInstance("lf"))

variant.sources.assets?.let {
it.addGeneratedSourceDirectory(syncAssets, SyncWithDir::outputFolder)
}
}
mergeAssetsProvider.configure { dependsOn(syncAssets) }
}

tasks.named<Delete>("clean") {
delete.addAll(listOf("src/main/jniLibs", "src/main/resources", "src/debug", "src/release"))
}
}

fun Project.setupAppCommon() {
setupCommon()

android {
androidApp {
signingConfigs {
Config["keyStore"]?.also {
create("config") {

@@ -254,22 +265,25 @@ fun Project.setupAppCommon() {
}
}

androidComponents.onVariants { variant ->
val commentTask = tasks.register(
"comment${variant.name.replaceFirstChar { it.uppercase() }}",
AddCommentTask::class.java
)
val transformationRequest = variant.artifacts.use(commentTask)
.wiredWithDirectories(AddCommentTask::apkFolder, AddCommentTask::outFolder)
.toTransformMany(SingleArtifact.APK)
val signingConfig = androidApp.buildTypes.getByName(variant.buildType!!).signingConfig
commentTask.configure {
this.transformationRequest = transformationRequest
this.signingConfig = signingConfig
this.comment = "version=${Config.version}\n" +
"versionCode=${Config.versionCode}\n" +
"stubVersion=${Config.stubVersion}\n"
this.outFolder.set(layout.buildDirectory.dir("outputs/apk/${variant.name}"))
androidAppComponents {
onVariants { variant ->
val commentTask = tasks.register(
"comment${variant.name.replaceFirstChar { it.uppercase() }}",
AddCommentTask::class.java
)
val transformationRequest = variant.artifacts.use(commentTask)
.wiredWithDirectories(AddCommentTask::apkFolder, AddCommentTask::outFolder)
.toTransformMany(SingleArtifact.APK)
val signingConfig = androidApp.buildTypes.getByName(variant.buildType!!).signingConfig
commentTask.configure {
this.transformationRequest = transformationRequest
this.signingConfig = signingConfig
this.comment = "version=${Config.version}\n" +
"versionCode=${Config.versionCode}\n" +
"stubVersion=${Config.stubVersion}\n"
this.outFolder.set(layout.buildDirectory.dir("outputs/apk/${variant.name}"))
}

}
}
}
@@ -277,7 +291,7 @@ fun Project.setupAppCommon() {
fun Project.setupMainApk() {
setupAppCommon()

android {
androidApp {
namespace = "com.topjohnwu.magisk"

defaultConfig {

@@ -290,8 +304,10 @@ fun Project.setupMainApk() {
debugSymbolLevel = "FULL"
}
}
}

androidComponents.onVariants { variant ->
androidComponents {
onVariants { variant ->
variant.instrumentation.apply {
setAsmFramesComputationMode(COMPUTE_FRAMES_FOR_INSTRUMENTED_METHODS)
transformClassesWith(

@@ -314,17 +330,26 @@ const val SHAMIKO_CHECKSUM =
fun Project.setupTestApk() {
setupAppCommon()

androidApp.applicationVariants.all {
val variantCapped = name.replaceFirstChar { it.uppercase() }
val dlTask by tasks.register("download${variantCapped}Lsposed", Sync::class) {
from(downloadFile(LSPOSED_DOWNLOAD_URL, LSPOSED_CHECKSUM)) {
rename { "lsposed.zip" }
androidComponents {
onVariants { variant ->
val variantName = variant.name
val variantCapped = variantName.replaceFirstChar { it.uppercase() }

val dlTask = tasks.register("download${variantCapped}Lsposed", SyncWithDir::class) {
outputFolder.set(layout.buildDirectory.dir("$variantName/lsposed"))
into(outputFolder)

from(downloadFile(LSPOSED_DOWNLOAD_URL, LSPOSED_CHECKSUM)) {
rename { "lsposed.zip" }
}
from(downloadFile(SHAMIKO_DOWNLOAD_URL, SHAMIKO_CHECKSUM)) {
rename { "shamiko.zip" }
}
}
from(downloadFile(SHAMIKO_DOWNLOAD_URL, SHAMIKO_CHECKSUM)) {
rename { "shamiko.zip" }

variant.sources.assets?.let {
it.addGeneratedSourceDirectory(dlTask, SyncWithDir::outputFolder)
}
into("src/${this@all.name}/assets")
}
mergeAssetsProvider.configure { dependsOn(dlTask) }
}
}
@@ -8,13 +8,14 @@ import org.gradle.api.tasks.CacheableTask
import org.gradle.api.tasks.Delete
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputFile
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputDirectory
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.PathSensitive
import org.gradle.api.tasks.PathSensitivity
import org.gradle.api.tasks.TaskAction
import org.gradle.kotlin.dsl.assign
import org.gradle.kotlin.dsl.named
import org.gradle.kotlin.dsl.register
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.File

@@ -82,18 +83,16 @@ private abstract class ManifestUpdater: DefaultTask() {
@get:Input
abstract val applicationId: Property<String>

@get:Input
abstract val factoryClass: Property<String>

@get:Input
abstract val appClass: Property<String>

@get:InputFile
@get:PathSensitive(PathSensitivity.RELATIVE)
abstract val mergedManifest: RegularFileProperty

@get:InputFiles
@get:PathSensitive(PathSensitivity.RELATIVE)
abstract val factoryClassDir: DirectoryProperty

@get:InputFiles
@get:PathSensitive(PathSensitivity.RELATIVE)
abstract val appClassDir: DirectoryProperty

@get:OutputFile
abstract val outputManifest: RegularFileProperty

@@ -170,25 +169,18 @@ private abstract class ManifestUpdater: DefaultTask() {

// Shuffle the order of the components
cmpList.shuffle(RANDOM)
val (factoryPkg, factoryClass) = factoryClassDir.asFileTree.firstNotNullOf {
it.parentFile!!.name to it.name.removeSuffix(".java")
}
val (appPkg, appClass) = appClassDir.asFileTree.firstNotNullOf {
it.parentFile!!.name to it.name.removeSuffix(".java")
}
val components = cmpList.joinToString("\n\n")
.replace("\${applicationId}", applicationId.get())
val manifest = mergedManifest.asFile.get().readText().replace(Regex(".*\\<application"), """
|<application
| android:appComponentFactory="$factoryPkg.$factoryClass"
| android:name="$appPkg.$appClass"""".ind(1)
| android:appComponentFactory="${factoryClass.get()}"
| android:name="${appClass.get()}"""".ind(1)
).replace(Regex(".*\\<\\/application"), "$components\n </application")
outputManifest.get().asFile.writeText(manifest)
}
}

private fun genStubClasses(factoryOutDir: File, appOutDir: File) {
private fun genStubClasses(outDir: File): Pair<String, String> {
val classNameGenerator = sequence {
fun notJavaKeyword(name: String) = when (name) {
"do", "if", "for", "int", "new", "try" -> false

@@ -217,7 +209,7 @@ private fun genStubClasses(factoryOutDir: File, appOutDir: File) {
}
}.distinct().iterator()

fun genClass(type: String, outDir: File) {
fun genClass(type: String, outDir: File): String {
val clzName = classNameGenerator.next()
val (pkg, name) = clzName.split('.')
val pkgDir = File(outDir, pkg)

@@ -226,10 +218,12 @@ private fun genStubClasses(factoryOutDir: File, appOutDir: File) {
it.println("package $pkg;")
it.println("public class $name extends com.topjohnwu.magisk.$type {}")
}
return clzName
}

genClass("DelegateComponentFactory", factoryOutDir)
genClass("StubApplication", appOutDir)
val factory = genClass("DelegateComponentFactory", outDir)
val app = genClass("StubApplication", outDir)
return Pair(factory, app)
}

private fun genEncryptedResources(res: ByteArray, outDir: File) {

@@ -264,74 +258,76 @@ private fun genEncryptedResources(res: ByteArray, outDir: File) {
}
}

private abstract class TaskWithDir : DefaultTask() {
@get:OutputDirectory
abstract val outputFolder: DirectoryProperty
}

fun Project.setupStubApk() {
setupAppCommon()

androidComponents.onVariants { variant ->
val variantName = variant.name
val variantCapped = variantName.replaceFirstChar { it.uppercase() }
val manifestUpdater =
project.tasks.register("${variantName}ManifestProducer", ManifestUpdater::class.java) {
dependsOn("generate${variantCapped}ObfuscatedClass")
applicationId = variant.applicationId
appClassDir.set(layout.buildDirectory.dir("generated/source/app/$variantName"))
factoryClassDir.set(layout.buildDirectory.dir("generated/source/factory/$variantName"))
}
variant.artifacts.use(manifestUpdater)
.wiredWithFiles(
ManifestUpdater::mergedManifest,
ManifestUpdater::outputManifest)
.toTransform(SingleArtifact.MERGED_MANIFEST)
}
androidAppComponents {
onVariants { variant ->
val variantName = variant.name
val variantCapped = variantName.replaceFirstChar { it.uppercase() }
val variantLowered = variantName.lowercase()

androidApp.applicationVariants.all {
val variantCapped = name.replaceFirstChar { it.uppercase() }
val variantLowered = name.lowercase()
val outFactoryClassDir = layout.buildDirectory.file("generated/source/factory/${variantLowered}").get().asFile
val outAppClassDir = layout.buildDirectory.file("generated/source/app/${variantLowered}").get().asFile
val outResDir = layout.buildDirectory.dir("generated/source/res/${variantLowered}").get().asFile
val aapt = File(androidApp.sdkDirectory, "build-tools/${androidApp.buildToolsVersion}/aapt2")
val apk = layout.buildDirectory.file("intermediates/linked_resources_binary_format/" +
"${variantLowered}/process${variantCapped}Resources/linked-resources-binary-format-${variantLowered}.ap_").get().asFile
val componentJavaOutDir = layout.buildDirectory
.dir("generated/${variantLowered}/components").get().asFile

val genManifestTask = tasks.register("generate${variantCapped}ObfuscatedClass") {
inputs.property("seed", RAND_SEED)
outputs.dirs(outFactoryClassDir, outAppClassDir)
doLast {
outFactoryClassDir.mkdirs()
outAppClassDir.mkdirs()
genStubClasses(outFactoryClassDir, outAppClassDir)
}
}
registerJavaGeneratingTask(genManifestTask, outFactoryClassDir, outAppClassDir)
val (factory, app) = genStubClasses(componentJavaOutDir)

val processResourcesTask = tasks.named("process${variantCapped}Resources") {
outputs.dir(outResDir)
doLast {
val apkTmp = File("${apk}.tmp")
providers.exec {
commandLine(aapt, "optimize", "-o", apkTmp, "--collapse-resource-names", apk)
}.result.get()

val bos = ByteArrayOutputStream()
ZipFile(apkTmp).use { src ->
ZipOutputStream(apk.outputStream()).use {
it.setLevel(Deflater.BEST_COMPRESSION)
it.putNextEntry(ZipEntry("AndroidManifest.xml"))
src.getInputStream(src.getEntry("AndroidManifest.xml")).transferTo(it)
it.closeEntry()
}
DeflaterOutputStream(bos, Deflater(Deflater.BEST_COMPRESSION)).use {
src.getInputStream(src.getEntry("resources.arsc")).transferTo(it)
}
val manifestUpdater =
project.tasks.register("${variantName}ManifestProducer", ManifestUpdater::class.java) {
applicationId = variant.applicationId
factoryClass.set(factory)
appClass.set(app)
}
apkTmp.delete()
genEncryptedResources(bos.toByteArray(), outResDir)
variant.artifacts.use(manifestUpdater)
.wiredWithFiles(
ManifestUpdater::mergedManifest,
ManifestUpdater::outputManifest)
.toTransform(SingleArtifact.MERGED_MANIFEST)

val aapt = sdkComponents.aapt2.get().executable.get().asFile
val apk = layout.buildDirectory.file("intermediates/linked_resources_binary_format/" +
"${variantLowered}/process${variantCapped}Resources/" +
"linked-resources-binary-format-${variantLowered}.ap_").get().asFile

val genResourcesTask = tasks.register("generate${variantCapped}BundledResources", TaskWithDir::class) {
dependsOn("process${variantCapped}Resources")
outputFolder.set(layout.buildDirectory.dir("generated/${variantLowered}/resources"))

doLast {
val apkTmp = File("${apk}.tmp")
providers.exec {
commandLine(aapt, "optimize", "-o", apkTmp, "--collapse-resource-names", apk)
}.result.get()

val bos = ByteArrayOutputStream()
ZipFile(apkTmp).use { src ->
ZipOutputStream(apk.outputStream()).use {
it.setLevel(Deflater.BEST_COMPRESSION)
it.putNextEntry(ZipEntry("AndroidManifest.xml"))
src.getInputStream(src.getEntry("AndroidManifest.xml")).transferTo(it)
it.closeEntry()
}
DeflaterOutputStream(bos, Deflater(Deflater.BEST_COMPRESSION)).use {
src.getInputStream(src.getEntry("resources.arsc")).transferTo(it)
}
}
apkTmp.delete()
genEncryptedResources(bos.toByteArray(), outputFolder.get().asFile)
}
}

variant.sources.java?.let {
it.addStaticSourceDirectory(componentJavaOutDir.path)
it.addGeneratedSourceDirectory(genResourcesTask, TaskWithDir::outputFolder)
}
}

registerJavaGeneratingTask(processResourcesTask, outResDir)
}

// Override optimizeReleaseResources task
val apk = layout.buildDirectory.file("intermediates/linked_resources_binary_format/" +
"release/processReleaseResources/linked-resources-binary-format-release.ap_").get().asFile
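The Setup.kt and Codegen.kt hunks above replace source-set tricks (writing into src/<variant>/..., registerJavaGeneratingTask, manual dependsOn wiring) with the AGP Variant API: a task declares an @get:OutputDirectory, and addGeneratedSourceDirectory wires that output into the variant's sources, creating the task dependency automatically. A minimal sketch of the same wiring for a hypothetical asset-generating task; the task name and file contents are illustrative, and it assumes a build.gradle.kts with the com.android.application plugin applied:

```kotlin
import org.gradle.api.DefaultTask
import org.gradle.api.file.DirectoryProperty
import org.gradle.api.tasks.OutputDirectory
import org.gradle.api.tasks.TaskAction

// The only contract AGP needs: "my generated files live under outputFolder".
abstract class GenerateAssetsTask : DefaultTask() {
    @get:OutputDirectory
    abstract val outputFolder: DirectoryProperty

    @TaskAction
    fun generate() {
        outputFolder.get().asFile.resolve("build_info.txt").writeText("generated")
    }
}

androidComponents {
    onVariants { variant ->
        val capped = variant.name.replaceFirstChar { it.uppercase() }
        val genAssets = tasks.register("generate${capped}Assets", GenerateAssetsTask::class.java)
        // AGP assigns outputFolder a location under build/generated, adds the
        // task dependency, and merges the directory into the variant's assets.
        variant.sources.assets?.addGeneratedSourceDirectory(genAssets, GenerateAssetsTask::outputFolder)
    }
}
```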
app/core/.gitignore (vendored, 3 lines changed)

@@ -1,3 +0,0 @@
/build
src/debug
src/release
@@ -1,6 +1,5 @@
plugins {
id("com.android.library")
kotlin("android")
kotlin("plugin.parcelize")
id("dev.zacsweers.moshix")
id("com.google.devtools.ksp")
app/core/proguard-rules.pro (vendored, 4 lines changed)

@@ -33,9 +33,5 @@
# is used.
-keep,allowobfuscation,allowshrinking class kotlin.coroutines.Continuation

# Excessive obfuscation
-flattenpackagehierarchy
-allowaccessmodification

-dontwarn org.junit.**
-dontwarn org.apache.**
@@ -19,7 +19,7 @@ abstract class SuLogDatabase : RoomDatabase() {

companion object {
val MIGRATION_1_2 = object : Migration(1, 2) {
override fun migrate(database: SupportSQLiteDatabase) = with(database) {
override fun migrate(db: SupportSQLiteDatabase) = with(db) {
execSQL("ALTER TABLE logs ADD COLUMN target INTEGER NOT NULL DEFAULT -1")
execSQL("ALTER TABLE logs ADD COLUMN context TEXT NOT NULL DEFAULT ''")
execSQL("ALTER TABLE logs ADD COLUMN gids TEXT NOT NULL DEFAULT ''")

@@ -44,7 +44,7 @@ object ServiceLocator {

private fun createSuLogDatabase(context: Context) =
Room.databaseBuilder(context, SuLogDatabase::class.java, "sulogs.db")
.addMigrations(SuLogDatabase.MIGRATION_1_2)
.fallbackToDestructiveMigration()
.fallbackToDestructiveMigration(true)
.build()

private fun createMarkwon(context: Context) =
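The two hunks above track Room API updates: Migration.migrate now takes a parameter named db, and the deprecated no-argument fallbackToDestructiveMigration() is replaced by the overload that takes an explicit Boolean. A minimal sketch against those signatures; the table and database names are placeholders, not Magisk's actual schema:

```kotlin
import android.content.Context
import androidx.room.Room
import androidx.room.RoomDatabase
import androidx.room.migration.Migration
import androidx.sqlite.db.SupportSQLiteDatabase

// Parameter is named `db`, matching the signature current Room declares.
val MIGRATION_1_2 = object : Migration(1, 2) {
    override fun migrate(db: SupportSQLiteDatabase) {
        db.execSQL("ALTER TABLE logs ADD COLUMN target INTEGER NOT NULL DEFAULT -1")
    }
}

fun <T : RoomDatabase> buildDb(context: Context, clazz: Class<T>): T =
    Room.databaseBuilder(context, clazz, "sulogs.db")
        .addMigrations(MIGRATION_1_2)
        // Newer Room deprecates the no-arg overload in favor of an explicit Boolean argument.
        .fallbackToDestructiveMigration(true)
        .build()
```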
@@ -34,7 +34,7 @@ data class ModuleJson(

@JsonClass(generateAdapter = true)
data class ReleaseAssets(
val name: String,
@Json(name = "browser_download_url") val url: String,
@param:Json(name = "browser_download_url") val url: String,
)

class DateTimeAdapter {

@@ -51,12 +51,12 @@ class DateTimeAdapter {

@JsonClass(generateAdapter = true)
data class Release(
@Json(name = "tag_name") val tag: String,
@param:Json(name = "tag_name") val tag: String,
val name: String,
val prerelease: Boolean,
val assets: List<ReleaseAssets>,
val body: String,
@Json(name = "created_at") val createdTime: Instant,
@param:Json(name = "created_at") val createdTime: Instant,
) {
val versionCode: Int get() {
return if (tag[0] == 'v') {
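The hunk above only changes annotation use-site targets: @Json on a constructor val becomes @param:Json, so the annotation unambiguously lands on the constructor parameter instead of relying on Kotlin's default target, which newer Kotlin versions warn about. A small sketch of the pattern; the User class and field names are illustrative, not from the Magisk codebase:

```kotlin
import com.squareup.moshi.Json
import com.squareup.moshi.JsonClass
import com.squareup.moshi.Moshi

@JsonClass(generateAdapter = true)
data class User(
    // Explicit use-site target: the annotation is applied to the constructor parameter.
    @param:Json(name = "login_name") val login: String,
)

fun main() {
    // Requires the Moshi codegen processor so UserJsonAdapter is generated at build time.
    val adapter = Moshi.Builder().build().adapter(User::class.java)
    println(adapter.fromJson("""{"login_name":"topjohnwu"}"""))
}
```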
@@ -15,18 +15,18 @@
|
||||
<string name="app_changelog">앱 변경 사항</string>
|
||||
<string name="loading">로딩중…</string>
|
||||
<string name="update">업데이트</string>
|
||||
<string name="not_available">N/A</string>
|
||||
<string name="not_available">알 수 없음</string>
|
||||
<string name="hide">숨기기</string>
|
||||
<string name="home_package">패키지</string>
|
||||
<string name="home_app_title">앱</string>
|
||||
|
||||
<string name="home_notice_content">공식 Github 페이지에서 Magisk를 다운로드하십시오. 알 수 없는 소스의 파일이 악의적일 수 있습니다!</string>
|
||||
<string name="home_notice_content">공식 Github 페이지에서 Magisk를 다운로드하십시오. 알 수 없는 출처에서 받은 파일이 위험할 수 있습니다!</string>
|
||||
<string name="home_support_title">후원하기</string>
|
||||
<string name="home_item_source">소스</string>
|
||||
<string name="home_support_content">Magisk는 항상 무료일 것이며, 오픈소스일 것입니다. 그러나 소액의 후원을 통해 관심을 표할 수 있습니다.</string>
|
||||
<string name="home_installed_version">설치됨</string>
|
||||
<string name="home_latest_version">최신</string>
|
||||
<string name="invalid_update_channel">올바르지 않은 업데이트 채널</string>
|
||||
<string name="invalid_update_channel">잘못된 업데이트 채널</string>
|
||||
<string name="uninstall_magisk_title">Magisk 제거</string>
|
||||
<string name="uninstall_magisk_msg">모든 모듈이 비활성화/제거됩니다. 루트도 제거될 것이며, 데이터도 암호화 되어있지 않으면 암호화될 수도 있습니다.</string>
|
||||
|
||||
@@ -51,11 +51,11 @@
|
||||
<!--Superuser-->
|
||||
<string name="su_request_title">슈퍼유저 요청</string>
|
||||
<string name="touch_filtered_warning">앱이 슈퍼유저 요청을 가려, Magisk에서 응답을 확인할 수 없습니다.</string>
|
||||
<string name="deny">일괄 거부</string>
|
||||
<string name="prompt">수동 허가</string>
|
||||
<string name="grant">일괄 허용</string>
|
||||
<string name="su_warning">기기에 대한 전체 액세스 권한을 부여합니다.\n확실하지 않은 경우 거부하세요!</string>
|
||||
<string name="forever">영구적으로</string>
|
||||
<string name="deny">모두 거부</string>
|
||||
<string name="prompt">물어보기</string>
|
||||
<string name="grant">모두 허용</string>
|
||||
<string name="su_warning">기기에 대한 슈퍼유저 권한을 부여합니다.\n확실하지 않은 경우 거부하세요!</string>
|
||||
<string name="forever">영구</string>
|
||||
<string name="once">한 번만</string>
|
||||
<string name="tenmin">10분</string>
|
||||
<string name="twentymin">20분</string>
|
||||
@@ -69,20 +69,20 @@
|
||||
<string name="su_snack_notif_off">%1$s의 알림이 비활성화됨</string>
|
||||
<string name="su_snack_log_on">%1$s의 로깅이 활성화됨</string>
|
||||
<string name="su_snack_log_off">%1$s의 로깅이 비활성화됨</string>
|
||||
<string name="su_revoke_title">취소하시겠습니까?</string>
|
||||
<string name="su_revoke_msg">정말 %1$s의 권한을 취소하시겠습니까?</string>
|
||||
<string name="su_revoke_title">제거하시겠습니까?</string>
|
||||
<string name="su_revoke_msg">정말 %1$s의 권한을 제거하시겠습니까?</string>
|
||||
<string name="toast">토스트</string>
|
||||
<string name="none">없음</string>
|
||||
|
||||
<string name="superuser_toggle_notification">알림</string>
|
||||
<string name="superuser_toggle_revoke">권한삭제</string>
|
||||
<string name="superuser_toggle_revoke">권한 제거</string>
|
||||
<string name="superuser_policy_none">슈퍼유저 권한을 요청한 앱이 없습니다.</string>
|
||||
|
||||
<!--Logs-->
|
||||
<string name="log_data_none">로그가 없습니다. 슈퍼유저 권한을 필요로 하는 앱을 사용하십시오.</string>
|
||||
<string name="log_data_magisk_none">Magisk 로그가 없습니다.</string>
|
||||
<string name="menuSaveLog">로그 저장</string>
|
||||
<string name="menuClearLog">지금 로그 삭제</string>
|
||||
<string name="menuClearLog">로그 삭제</string>
|
||||
<string name="logs_cleared">로그 삭제 완료.</string>
|
||||
<string name="pid">PID: %1$d</string>
|
||||
<string name="target_uid">Target UID: %1$d</string>
|
||||
@@ -96,9 +96,9 @@
|
||||
<string name="hide_search">검색</string>
|
||||
|
||||
<!--Module-->
|
||||
<string name="no_info_provided">(제공된 정보 없음)</string>
|
||||
<string name="no_info_provided">(정보 없음)</string>
|
||||
<string name="reboot_userspace">조용히 다시 시작</string>
|
||||
<string name="reboot_recovery">리커버리로 다시 시작</string>
|
||||
<string name="reboot_recovery">복구 모드로 다시 시작</string>
|
||||
<string name="reboot_bootloader">부트로더로 다시 시작</string>
|
||||
<string name="reboot_download">다운로드 모드로 다시 시작</string>
|
||||
<string name="reboot_edl">EDL로 다시 시작</string>
|
||||
@@ -107,20 +107,20 @@
|
||||
<string name="module_state_restore">복구</string>
|
||||
<string name="module_action_install_external">저장소에서 설치</string>
|
||||
<string name="update_available">업데이트 가능</string>
|
||||
<string name="suspend_text_riru">%1$s 가 활성화 되어있어 모듈 로드가 일시정지 되었습니다.</string>
|
||||
<string name="suspend_text_zygisk">%1$s 가 비활성화 되어있어 모듈이 로드되지 않았습니다.</string>
|
||||
<string name="suspend_text_riru">%1$s 가 활성화 되어있어 모듈이 로드되지 않았습니다.</string>
|
||||
<string name="suspend_text_zygisk">%1$s 가 활성화되어 있지 않아 모듈이 로드되지 않았습니다.</string>
|
||||
<string name="zygisk_module_unloaded">호환성 문제로 인해 Zygisk 모듈이 로드되지 않았습니다.</string>
|
||||
|
||||
<!--Settings-->
|
||||
<string name="settings_dark_mode_title">테마 선택</string>
|
||||
<string name="settings_dark_mode_message">원하는 테마 모드를 선택하세요!</string>
|
||||
<string name="settings_dark_mode_light">기본</string>
|
||||
<string name="settings_dark_mode_system">시스템 설정값</string>
|
||||
<string name="settings_dark_mode_system">자동</string>
|
||||
<string name="settings_dark_mode_dark">다크 모드</string>
|
||||
<string name="settings_download_path_title">다운로드 경로</string>
|
||||
<string name="settings_download_path_title">다운로드 위치</string>
|
||||
<string name="settings_download_path_message">파일이 %1$s에 저장됩니다</string>
|
||||
<string name="settings_hide_app_title">Magisk 앱 숨기기</string>
|
||||
<string name="settings_hide_app_summary">랜덤 패키지 ID와 커스텀 앱 이름으로 Magisk 프록시 앱을 설치합니다.</string>
|
||||
<string name="settings_hide_app_summary">무작위 패키지명과 사용자 지정 앱 이름으로 Magisk 프록시 앱을 설치합니다.</string>
|
||||
<string name="settings_restore_app_title">Magisk 앱 복원</string>
|
||||
<string name="settings_restore_app_summary">앱 숨기기를 해제하고 원래 APK로 복원합니다.</string>
|
||||
<string name="language">언어</string>
|
||||
@@ -167,12 +167,12 @@
|
||||
<string name="settings_doh_description">일부 국가에 존재하는 DNS 포이즈닝을 해결합니다.</string>
|
||||
|
||||
<string name="multiuser_mode">다중 사용자 모드</string>
|
||||
<string name="settings_owner_only">기기 소유자만</string>
|
||||
<string name="settings_owner_manage">기기 소유자에 의해 관리됨</string>
|
||||
<string name="settings_user_independent">사용자별</string>
|
||||
<string name="owner_only_summary">소유자만 루트 액세스를 갖습니다.</string>
|
||||
<string name="owner_manage_summary">소유자만 루트 액세스를 관리하고 요청을 받을 수 있습니다.</string>
|
||||
<string name="user_independent_summary">각각의 사용자가 개별적인 권한을 갖습니다.</string>
|
||||
<string name="settings_owner_only">주인 사용자만</string>
|
||||
<string name="settings_owner_manage">주인 사용자에 의해 관리됨</string>
|
||||
<string name="settings_user_independent">사용자별 분리</string>
|
||||
<string name="owner_only_summary">주인 사용자만 루트 액세스를 갖습니다.</string>
|
||||
<string name="owner_manage_summary">주인 사용자가 다른 사용자들의 루트 액세스를 관리하고 요청을 받을 수 있습니다.</string>
|
||||
<string name="user_independent_summary">각각의 사용자가 권한을 관리합니다.</string>
|
||||
|
||||
<string name="mount_namespace_mode">네임스페이스 마운트 모드</string>
|
||||
<string name="settings_ns_global">전역 네임스페이스</string>
|
||||
@@ -187,7 +187,7 @@
|
||||
<string name="progress_channel">진행 상황</string>
|
||||
<string name="updated_channel">업데이트 완료</string>
|
||||
<string name="download_complete">다운로드 완료</string>
|
||||
<string name="download_file_error">파일 다운로드 오류</string>
|
||||
<string name="download_file_error">파일 다운로드 실패</string>
|
||||
<string name="magisk_update_title">새 버전의 Magisk를 사용할 수 있습니다!</string>
|
||||
<string name="updated_title">Magisk가 업데이트 되었습니다!</string>
|
||||
<string name="updated_text">터치하여 앱 열기</string>
|
||||
@@ -208,7 +208,7 @@
|
||||
<string name="restore_img">이미지 복구</string>
|
||||
<string name="restore_img_msg">복구하는 중…</string>
|
||||
<string name="restore_done">복구 완료!</string>
|
||||
<string name="restore_fail">원 백업이 존재하지 않습니다!</string>
|
||||
<string name="restore_fail">백업이 존재하지 않습니다!</string>
|
||||
<string name="setup_fail">설치 실패</string>
|
||||
<string name="env_fix_title">추가 설정 필요</string>
|
||||
<string name="env_fix_msg">Magisk가 제대로 작동하려면 추가 설정이 필요합니다. 다시 시작 하시겠습니까?</string>
|
||||
@@ -219,10 +219,10 @@
|
||||
<string name="unsupport_system_app_msg">해당 앱을 시스템 앱으로 실행하는 것은 지원되지 않습니다. 앱을 일반 사용자 앱으로 실행해 주세요.</string>
|
||||
<string name="unsupport_other_su_msg">Magisk으로 부터 설치되지 않은 \"su\" 바이너리가 감지되었습니다. 다른 루팅 방법을 제거하거나, Magisk 를 다시 설치해주세요.</string>
|
||||
<string name="unsupport_external_storage_msg">Magisk 가 외부 저장소에 설치되어 있습니다. Magisk 를 내부 저장소에 설치 해주세요.</string>
|
||||
<string name="unsupport_nonroot_stub_msg">숨겨진 Magisk 앱은 루팅이 풀려 더이상 작동하지 못합니다. 본래 APK 를 복원하거나 재설치 해주세요.</string>
|
||||
<string name="unsupport_nonroot_stub_msg">이 숨겨진 Magisk 앱은 루트 권한이 손실되어 사용할 수 없습니다. 원래 APK 를 복원하거나 재설치 해주세요.</string>
|
||||
<string name="unsupport_nonroot_stub_title">@string/settings_restore_app_title</string>
|
||||
<string name="external_rw_permission_denied">해당 기능을 사용하려면 저장소 권한을 허용해 주십시오.</string>
|
||||
<string name="install_unknown_denied">이 기능을 활성화 하려면 "알 수 없는 앱 설치"를 허용해주세요.</string>
|
||||
<string name="install_unknown_denied">이 기능을 활성화 하려면 "출처를 알 수 없는 앱 설치"를 허용해주세요.</string>
|
||||
<string name="add_shortcut_title">홈 화면에 바로가기 추가</string>
|
||||
<string name="add_shortcut_msg">앱을 숨긴 후 아이콘과 이름을 알아보기 힘들 경우를 위해 알아보기 쉬운 바로가기를 홈 화면에 추가합니다.</string>
|
||||
<string name="app_not_found">해당 작업을 처리할 어플리케이션이 없습니다.</string>
|
||||
|
||||
@@ -2,242 +2,255 @@
|
||||
|
||||
<!--Sections-->
|
||||
<string name="modules">Modulet</string>
|
||||
<string name="superuser">Super-përdoruesi</string>
|
||||
<string name="logs">Regjistrat</string>
|
||||
<string name="settings">Cilësimet</string>
|
||||
<string name="superuser">Superuser</string>
|
||||
<string name="logs">Regjistrimet</string>
|
||||
<string name="settings">Parametrat</string>
|
||||
<string name="install">Instalo</string>
|
||||
<string name="section_home">Shtëpi</string>
|
||||
<string name="section_theme">Tema</string>
|
||||
<string name="denylist">Lista e mohimit</string>
|
||||
<string name="section_home">Shtëpia</string>
|
||||
<string name="section_theme">Temat</string>
|
||||
<string name="denylist">Lista e ndaluar</string>
|
||||
|
||||
<!--Home-->
|
||||
<string name="no_connection">Nuk ka lidhje interneti</string>
|
||||
<string name="app_changelog">Ndryshimet</string>
|
||||
<string name="loading">Po ngarkohet…</string>
|
||||
<string name="no_connection">Nuk ka lidhje të disponueshme</string>
|
||||
<string name="app_changelog">Shënimet e ndryshimeve</string>
|
||||
<string name="loading">Duke u ngarkuar…</string>
|
||||
<string name="update">Përditëso</string>
|
||||
<string name="not_available">N/A</string>
|
||||
<string name="hide">Fshih</string>
|
||||
<string name="home_package">Paketa</string>
|
||||
<string name="home_app_title">App</string>
|
||||
|
||||
<string name="home_notice_content">Shkarkoni Magisk VETEM nga faqja zyrtare e GitHub. Skedarët nga burime të panjohura mund të jenë me qëllim të keq! </string>
|
||||
<string name="home_app_title">Aplikacioni</string>
|
||||
<string name="home_notice_content">Shkarkoni Magisk VETËM nga faqja zyrtare në GitHub. Skedarët nga burime të panjohura mund të jenë të dëmshëm!</string>
|
||||
<string name="home_support_title">Na mbështetni</string>
|
||||
<string name="home_follow_title">Na ndiqni</string>
|
||||
<string name="home_item_source">Burimi</string>
|
||||
<string name="home_support_content">Magisk është, dhe gjithmonë do të jetë, falas dhe me burim të hapur. Sidoqoftë, mund të na tregoni se kujdeseni duke dërguar një donacion të vogël.</string>
|
||||
<string name="home_support_content">Magisk është dhe do të mbetet gjithmonë falas dhe me burim të hapur. Megjithatë, mund të na mbështesni duke bërë një donacion.</string>
|
||||
<string name="home_installed_version">Instaluar</string>
|
||||
<string name="home_latest_version">E fundit</string>
|
||||
<string name="invalid_update_channel">Kanali i përditësimit i pavlefshëm</string>
|
||||
<string name="home_latest_version">Më i fundit</string>
|
||||
<string name="invalid_update_channel">Kanal i pavlefshëm për përditësime</string>
|
||||
<string name="uninstall_magisk_title">Çinstalo Magisk</string>
|
||||
<string name="uninstall_magisk_msg">Të gjitha modulet do të çaktivizohen/hiqen!\nRrënja do të hiqet!\nTë dhënat tuaja potencialisht të koduara nëse jo tashmë!</string>
|
||||
<string name="uninstall_magisk_msg">Të gjitha modulet do të çaktivizohen/hiqen!
|
||||
Root-i do të hiqet!
|
||||
Çdo memorie e brendshme që është çenkriptuar përmes Magisk do të rikriptohet!</string>
|
||||
|
||||
<!--Install-->
|
||||
<string name="keep_force_encryption">Ruaj kriptimin me forcë</string>
|
||||
<string name="keep_force_encryption">Ruaj enkriptimin e detyruar</string>
|
||||
<string name="keep_dm_verity">Ruaj AVB 2.0/dm-verity</string>
|
||||
<string name="recovery_mode">Recovery Mode</string>
|
||||
<string name="recovery_mode">Mënyra Recovery</string>
|
||||
<string name="install_options_title">Opsionet</string>
|
||||
<string name="install_method_title">Metoda</string>
|
||||
<string name="install_next">Tjetër</string>
|
||||
<string name="install_start">Shkojme</string>
|
||||
<string name="manager_download_install">Shtypni për ta shkarkuar dhe instaluarl</string>
|
||||
<string name="direct_install">Instalimi i direkt (Rekomandohet)</string>
|
||||
<string name="install_inactive_slot">Instaloni në slotin joaktiv(Pas OTA)</string>
|
||||
<string name="install_inactive_slot_msg">Pajisja juaj do të detyrohet të fillojë në folenë aktuale joaktive pas një rindezje!\nPërdoreni këtë opsion vetëm pasi të keni përfunduar OTA.\nVazhdo?</string>
|
||||
<string name="setup_title">Konfigurimet shtesë</string>
|
||||
<string name="select_patch_file">Zgjidhni dhe Patch një skader</string>
|
||||
<string name="patch_file_msg">Zgjidhni një imazh të papërpunuar (*.img) ose një skedar ODIN (*.tar) ose një payload.bin (*.bin)</string>
|
||||
<string name="reboot_delay_toast">Rinisje pas 5 sekondash…</string>
|
||||
<string name="install_next">Vazhdoni</string>
|
||||
<string name="install_start">Le të fillojmë</string>
|
||||
<string name="manager_download_install">Shtypni për të shkarkuar dhe instaluar</string>
|
||||
<string name="direct_install">Instalim i drejtpërdrejtë (Rekomandohet)</string>
|
||||
<string name="install_inactive_slot">Instalo në slot-in joaktiv (Pas OTA)</string>
|
||||
<string name="install_inactive_slot_msg">Pajisja juaj do të detyrohet të niset në slot-in joaktiv pas rinisjes!
|
||||
Përdorni këtë opsion vetëm pasi OTA të ketë përfunduar.
|
||||
Të vazhdoj?</string>
|
||||
<string name="setup_title">Konfigurim shtesë</string>
|
||||
<string name="select_patch_file">Zgjidh dhe përpuno një skedar</string>
|
||||
<string name="patch_file_msg">Zgjidh një imazh të papërpunuar (*.img) ose një skedar ODIN (*.tar) ose një payload.bin (*.bin)</string>
|
||||
<string name="reboot_delay_toast">Rinisja pas 5 sekondash…</string>
|
||||
<string name="flash_screen_title">Instalimi</string>
|
||||
|
||||
<!--Superuser-->
|
||||
<string name="su_request_title">Kërkesë nga superpërdoruesi</string>
|
||||
<string name="touch_filtered_warning">Për shkak se një aplikacion po errëson një kërkesë të superpërdoruesit, Magisk nuk mund të verifikojë përgjigjen tuaj</string>
|
||||
<string name="su_request_title">Kërkesë Superuser</string>
|
||||
<string name="touch_filtered_warning">Për shkak se një aplikacion po mbivendos kërkesën Superuser, Magisk nuk mund të verifikojë përgjigjen tuaj.</string>
|
||||
<string name="deny">Refuzo</string>
|
||||
<string name="prompt">Pyet</string>
|
||||
<string name="prompt">Pyete</string>
|
||||
<string name="restrict">Kufizo</string>
|
||||
<string name="grant">Lejo</string>
|
||||
<string name="su_warning">Jep akses të plotë në pajisjen tuaj.\nRefuzo nëse nuk jeni të sigurt!</string>
|
||||
<string name="forever">Gjithmonë</string>
|
||||
<string name="su_warning">Jep akses të plotë në pajisjen tuaj.
|
||||
Refuzoni nëse nuk jeni të sigurt!</string>
|
||||
<string name="forever">Përgjithmonë</string>
|
||||
<string name="once">Një herë</string>
|
||||
<string name="tenmin">10 minuta</string>
|
||||
<string name="twentymin">20 minuta</string>
|
||||
<string name="thirtymin">30 minuta</string>
|
||||
<string name="sixtymin">60 minuta</string>
|
||||
<string name="su_allow_toast">%1$s iu dha aksesi te Super-përdoruesi</string>
|
||||
<string name="su_deny_toast">%1$s iu refuzua aksesi te Super -përdoruesi</string>
|
||||
<string name="su_snack_grant">Aksesi i super-përdoruesit te %1$s është lenuar</string>
|
||||
<string name="su_snack_deny">Aksesi i super-përdoruesit te %1$s është refuzuar</string>
|
||||
<string name="su_snack_notif_on">Njoftimet e %1$s janë aktivizuar</string>
|
||||
<string name="su_snack_notif_off">Njoftimet e %1$s janë çaktivizuar</string>
|
||||
<string name="su_snack_log_on">Regjistrat e %1$s janë aktivizuar</string>
|
||||
<string name="su_snack_log_off">Regjistrat e %1$s janë çaktivizuar</string>
|
||||
<string name="su_revoke_title">Të drejtat?</string>
|
||||
<string name="su_revoke_msg">Konfirmo për të hequr të drejtat e %1$s?</string>
|
||||
<string name="toast">Dolli</string>
|
||||
<string name="su_allow_toast">%1$s mori të drejtat Superuser</string>
|
||||
<string name="su_deny_toast">%1$s u refuzua të drejtat Superuser</string>
|
||||
<string name="su_snack_grant">%1$s mori të drejtat Superuser</string>
|
||||
<string name="su_snack_deny">%1$s u refuzua të drejtat Superuser</string>
|
||||
<string name="su_snack_notif_on">Njoftimet për %1$s u aktivizuan</string>
|
||||
<string name="su_snack_notif_off">Njoftimet për %1$s u çaktivizuan</string>
|
||||
<string name="su_snack_log_on">Regjistrimi për %1$s u aktivizua</string>
|
||||
<string name="su_snack_log_off">Regjistrimi për %1$s u çaktivizua</string>
|
||||
<string name="su_revoke_title">Të hiqen?</string>
|
||||
<string name="su_revoke_msg">Konfirmoni heqjen e të drejtave Superuser për %1$s</string>
|
||||
<string name="toast">Njoftim</string>
|
||||
<string name="none">Asnjë</string>
|
||||
<string name="superuser_toggle_notification">Njoftimet</string>
|
||||
<string name="superuser_toggle_revoke">Të drejtat</string>
|
||||
<string name="superuser_policy_none">Asnjë aplikacion nuk ka kërkuar akoma akses për super-përdoruesin.</string>
|
||||
<string name="superuser_toggle_revoke">Hiq</string>
|
||||
<string name="superuser_policy_none">Asnjë aplikacion nuk ka kërkuar ende leje Superuser.</string>
|
||||
|
||||
<!--Logs-->
|
||||
<string name="log_data_none">Nuk ka regjistra, provoni të përdorni më shumë aplikacionet tuaja me SU</string>
|
||||
<string name="log_data_magisk_none">Regjistrat Magisk janë bosh, kjo është e çuditshme</string>
|
||||
<string name="menuSaveLog">Ruaj regjistrar</string>
|
||||
<string name="menuClearLog">Pastro regjistrat tani</string>
|
||||
<string name="logs_cleared">Regjistrat u pastuan me sukses</string>
|
||||
<string name="log_data_none">Nuk keni regjistrime. Provojeni të përdorni më shumë aplikacionet me root.</string>
|
||||
<string name="log_data_magisk_none">Regjistrimet e Magisk janë bosh — çuditërisht.</string>
|
||||
<string name="menuSaveLog">Ruaj regjistrimin</string>
|
||||
<string name="menuClearLog">Pastro regjistrimin tani</string>
|
||||
<string name="logs_cleared">Regjistrimet u pastruan me sukses</string>
|
||||
<string name="pid">PID: %1$d</string>
|
||||
<string name="target_uid">Target UID: %1$d</string>
|
||||
<string name="target_pid">Montoni PID të synuar ns: %s</string>
|
||||
<string name="target_uid">UID i synuar: %1$d</string>
|
||||
<string name="target_pid">PID i synuar: %s</string>
|
||||
<string name="selinux_context">Konteksti SELinux: %s</string>
|
||||
<string name="supp_group">Grupi suplementar: %s</string>
|
||||
<string name="supp_group">Grupi shtesë: %s</string>
|
||||
|
||||
<!--MagiskHide-->
|
||||
<string name="show_system_app">Shfaq aplikacionet e sistemit</string>
|
||||
<string name="show_os_app">Shfaq aplikacionet e sistemit operativ</string>
|
||||
<string name="hide_filter_hint">Kërko sipas emrit</string>
|
||||
<string name="show_os_app">Shfaq aplikacionet e OS</string>
|
||||
<string name="hide_filter_hint">Filtro sipas emrit</string>
|
||||
<string name="hide_search">Kërko</string>
|
||||
|
||||
<!--Module-->
|
||||
<string name="no_info_provided">(Nuk ka asnjë informacion)</string>
|
||||
<string name="reboot_userspace">Rinisje e shpejtë</string>
|
||||
<string name="reboot_recovery">Rinis te Recovery</string>
|
||||
<string name="reboot_bootloader">Rinis te Bootloader</string>
|
||||
<string name="reboot_download">Rinis te Download</string>
|
||||
<string name="reboot_edl">Rinis te EDL</string>
|
||||
<string name="reboot_safe_mode">Rinis në safe mode</string>
|
||||
<string name="no_info_provided">(Nuk u dha informacion)</string>
|
||||
<string name="reboot_userspace">Rinisje Normale</string>
|
||||
<string name="reboot_recovery">Rinis në Recovery</string>
|
||||
<string name="reboot_bootloader">Rinis në Bootloader</string>
|
||||
<string name="reboot_download">Rinis në Download</string>
|
||||
<string name="reboot_edl">Rinis në EDL</string>
|
||||
<string name="reboot_safe_mode">Mënyra e sigurt</string>
|
||||
<string name="module_version_author">%1$s nga %2$s</string>
|
||||
<string name="module_state_remove">Hiqe</string>
|
||||
<string name="module_action">Veprim</string>
|
||||
<string name="module_state_restore">Rikëthe</string>
|
||||
<string name="module_action_install_external">Instaloni nga sdcard</string>
|
||||
<string name="update_available">Përditësimi në dispozicion</string>
|
||||
<string name="suspend_text_riru">Moduli u pezullua sepse %1$s është aktivizuar</string>
|
||||
<string name="suspend_text_zygisk">Moduli është pezulluar sepse %1$s nuk është i aktivizuar</string>
|
||||
<string name="zygisk_module_unloaded">Moduli Zygisk nuk është ngarkuar për shkak të papajtueshmërisë</string>
|
||||
<string name="module_empty">Ska module të instaluar</string>
|
||||
<string name="module_action">Veprimi</string>
|
||||
<string name="module_state_restore">Rikthe</string>
|
||||
<string name="module_action_install_external">Instalo nga memoria</string>
|
||||
<string name="update_available">Përditësim i disponueshëm</string>
|
||||
<string name="suspend_text_riru">Moduli u pezullua sepse %1$s është aktiv</string>
|
||||
<string name="suspend_text_zygisk">Moduli u pezullua sepse %1$s nuk është aktiv</string>
|
||||
<string name="zygisk_module_unloaded">Moduli Zygisk nuk u ngarkua për shkak të mospërputhjes</string>
|
||||
<string name="module_empty">Nuk ka module të instaluara</string>
|
||||
<string name="confirm_install">Të instalohet moduli %1$s?</string>
|
||||
<string name="confirm_install_title">Konfirmo instalimin</string>
|
||||
<string name="confirm_install_title">Konfirmim instalimi</string>
|
||||
|
||||
<!--Settings-->
|
||||
<string name="settings_dark_mode_title">Mënyra e temës</string>
|
||||
<string name="settings_dark_mode_message">Zgjidhni mënyrën që i përshtatet më shumë stilit tuaj!</string>
|
||||
<string name="settings_dark_mode_light">Gjithmonë e bardhë</string>
|
||||
<string name="settings_dark_mode_system">Sipas sistemit</string>
|
||||
<string name="settings_dark_mode_dark">Gjithmonë e zezë</string>
|
||||
<string name="settings_download_path_title">Vendodhje e shkarkimit</string>
|
||||
<string name="settings_download_path_message">Shkarkimet do të ruhen në %1$s</string>
|
||||
<string name="settings_hide_app_title">Fsheh aplikacionin Magisk</string>
|
||||
<string name="settings_hide_app_summary">Instaloni një aplikacion përfaqësues me ID të paketës të rastësishme dhe etiketë të personalizuar të aplikacionitl</string>
|
||||
<string name="settings_restore_app_title">Rivendosni aplikacionin Magisk</string>
|
||||
<string name="settings_restore_app_summary">un-fsheh aplikacionin dhe riktheni atë në APK origjinale</string>
|
||||
<string name="settings_dark_mode_message">Zgjidh mënyrën që i përshtatet më shumë stilit tënd!</string>
|
||||
<string name="settings_dark_mode_light">Gjithmonë e ndritshme</string>
|
||||
<string name="settings_dark_mode_system">Ndiq sistemin</string>
|
||||
<string name="settings_dark_mode_dark">Gjithmonë e errët</string>
|
||||
<string name="settings_download_path_title">Rruga e shkarkimit</string>
|
||||
<string name="settings_download_path_message">Skedarët do të ruhen në %1$s</string>
|
||||
<string name="settings_hide_app_title">Fshi aplikacionin Magisk</string>
|
||||
<string name="settings_hide_app_summary">Instalo një aplikacion proxy me një ID pakete të rastësishme dhe emër të personalizuar</string>
|
||||
<string name="settings_restore_app_title">Rikthe aplikacionin Magisk</string>
|
||||
<string name="settings_restore_app_summary">Zbulo aplikacionin dhe rikthe APK-në origjinale</string>
|
||||
<string name="language">Gjuha</string>
|
||||
<string name="system_default">(Parazgjedhja e sistemit)</string>
|
||||
<string name="settings_check_update_title">Kontrollo për përditësime</string>
|
||||
<string name="settings_check_update_summary">Kontrolloni automatikisht për përditësime në sfond</string>
|
||||
<string name="settings_update_channel_title">Perditeso kanalin</string>
|
||||
<string name="settings_update_stable">E qëndrueshme</string>
|
||||
<string name="settings_check_update_summary">Kontrollo periodikisht për përditësimet në sfond</string>
|
||||
<string name="settings_update_channel_title">Kanal për përditësime</string>
|
||||
<string name="settings_update_stable">Stable</string>
|
||||
<string name="settings_update_beta">Beta</string>
|
||||
<string name="settings_update_custom">Kanal me porosi</string>
|
||||
<string name="settings_update_custom_msg">Fut një URL të personalizuar</string>
|
||||
<string name="settings_zygisk_summary">Drejtoni pjesë të Magisk në demonin zygote</string>
|
||||
<string name="settings_denylist_title">Zbato Listën e Mohimit</string>
|
||||
<string name="settings_denylist_summary">Proceset në listën e mohimit do të kenë të gjitha modifikimet e Magisk</string>
|
||||
<string name="settings_denylist_config_title">Konfiguro Listën e Mohimit</string>
|
||||
<string name="settings_denylist_config_summary">Zgjidhni proceset që do të përfshihen në listën e mohimit</string>
|
||||
<string name="settings_hosts_title">Pritësit pa sistem</string>
|
||||
<string name="settings_hosts_summary">Pritësit pa sistem mbështesin aplikacionet Adblock</string>
|
||||
<string name="settings_hosts_toast">Moduli i hosteve pa sistem u shtua</string>
|
||||
<string name="settings_app_name_hint">Emri i ri</string>
|
||||
<string name="settings_app_name_helper">Aplikacioni do të ripaketohet me këtë emër</string>
|
||||
<string name="settings_update_debug">Debug</string>
|
||||
<string name="settings_update_custom">Custom</string>
|
||||
<string name="settings_update_custom_msg">Fut një URL të personalizuar të kanalit</string>
|
||||
<string name="settings_zygisk_summary">Ekzekuto pjesë të Magisk në demonin Zygote</string>
|
||||
<string name="settings_denylist_title">Zbato listën e ndaluar</string>
|
||||
<string name="settings_denylist_summary">Proceset në listën e ndaluar do të rikthehen pa modifikimet e Magisk</string>
|
||||
<string name="settings_denylist_config_title">Konfiguro listën e ndaluar</string>
|
||||
<string name="settings_denylist_config_summary">Zgjidh proceset që do të përfshihen në listën e ndaluar</string>
|
||||
<string name="settings_hosts_title">Systemless hosts</string>
|
||||
<string name="settings_hosts_summary">Mbështetje për systemless hosts për aplikacionet që bllokojnë reklamat</string>
|
||||
<string name="settings_hosts_toast">U shtua moduli systemless hosts</string>
|
||||
<string name="settings_app_name_hint">Emër i ri</string>
|
||||
<string name="settings_app_name_helper">Aplikacioni do të ripaketizohet me këtë emër</string>
|
||||
<string name="settings_app_name_error">Format i pavlefshëm</string>
|
||||
<string name="settings_su_app_adb">Aplikacionet dhe ADB</string>
|
||||
<string name="settings_su_app">Vetëm aplikacionet</string>
|
||||
<string name="settings_su_adb">Vetëm ADB</string>
|
||||
<string name="settings_su_disable">Çaktivizuar</string>
|
||||
<string name="settings_su_request_10">10 Sekonda</string>
|
||||
<string name="settings_su_request_15">15 Sekonda</string>
|
||||
<string name="settings_su_request_20">20 Sekonda</string>
|
||||
<string name="settings_su_request_30">30 Sekonda</string>
|
||||
<string name="settings_su_request_45">45 Sekonda</string>
|
||||
<string name="settings_su_request_60">60 Sekonda</string>
|
||||
<string name="superuser_access">Aksesi i Super-përdorues</string>
|
||||
<string name="settings_su_request_10">10 sekonda</string>
|
||||
<string name="settings_su_request_15">15 sekonda</string>
|
||||
<string name="settings_su_request_20">20 sekonda</string>
|
||||
<string name="settings_su_request_30">30 sekonda</string>
|
||||
<string name="settings_su_request_45">45 sekonda</string>
|
||||
<string name="settings_su_request_60">60 sekonda</string>
|
||||
<string name="superuser_access">Akses Superuser</string>
|
||||
<string name="auto_response">Përgjigje automatike</string>
|
||||
<string name="request_timeout">Koha për mbarimit të Kërkesës</string>
|
||||
<string name="superuser_notification">Njoftimi i Super-përdoruesit</string>
|
||||
<string name="settings_su_reauth_title">Ri-vërtetimi pas azhurnimit</string>
|
||||
<string name="settings_su_reauth_summary">Ri-vërtetoni lejet e super-përdoruesit pas azhurnimit të aplikacionit</string>
|
||||
<string name="settings_su_tapjack_title">Aktivizo mbrojtjen tapjacking</string>
|
||||
<string name="settings_su_tapjack_summary">Dialogu i menjëhershëm i super-përdoruesit nuk do ti përgjigjet hyrjes ndërsa është i errësuar nga ndonjë dritare ose mbivendosje tjetër</string>
|
||||
<string name="request_timeout">Koha e skadimit të kërkesës</string>
|
||||
<string name="superuser_notification">Njoftimi Superuser</string>
|
||||
<string name="settings_su_reauth_title">Riautentifikimi pas përditësimit</string>
|
||||
<string name="settings_su_reauth_summary">Kërko sërish lejet Superuser pas përditësimit të aplikacioneve</string>
|
||||
<string name="settings_su_tapjack_title">Mbrojtje nga mbivendosja e klikimeve</string>
|
||||
<string name="settings_su_tapjack_summary">Dritarja e kërkesës Superuser nuk do të pranojë input kur është e mbuluar nga ndonjë dritare tjetër</string>
|
||||
<string name="settings_su_auth_title">Autentifikimi i përdoruesit</string>
|
||||
<string name="settings_su_auth_summary">Kërko autentifikim të përdoruesit gjatë kërkesave Superuser</string>
|
||||
<string name="settings_su_auth_insecure">Nuk ka asnjë metodë autentifikimi të konfiguruar në pajisje</string>
|
||||
<string name="settings_su_restrict_title">Kufizo aftësitë e root</string>
|
||||
<string name="settings_su_restrict_summary">Do të kufizojë aplikacionet e reja Superuser si parazgjedhje. Kujdes: kjo mund të prishë shumicën e aplikacioneve. Mos e aktivizoni nëse nuk dini çfarë bëni.</string>
|
||||
<string name="settings_customization">Personalizimi</string>
|
||||
<string name="setting_add_shortcut_summary">Shtoni një shkurtore mjaft të mirë në ekranin fillestar në rast se emri dhe ikona janë të vështira për tu njohur pasi keni fshehur aplikacionin</string>
|
||||
<string name="setting_add_shortcut_summary">Shto një shkurtore në ekranin bazë nëse emri/ikona bëhen të vështira për t’u dalluar pas fshehjes së aplikacionit</string>
|
||||
<string name="settings_doh_title">DNS mbi HTTPS</string>
|
||||
<string name="settings_doh_description">Helmimi i paqartë nga DNS në disa kombe</string>
|
||||
<string name="multiuser_mode">Mënyra Multi-përdoruesit</string>
|
||||
<string name="settings_owner_only">Vetëm pronari i paisjes</string>
|
||||
<string name="settings_owner_manage">Pronari i paisjes që e manaxhon</string>
|
||||
<string name="settings_user_independent">I pavarur nga përdoruesi</string>
|
||||
<string name="owner_only_summary">Vetëm pronari ka akses në rrënjë</string>
|
||||
<string name="owner_manage_summary">Vetëm pronari mund të menaxhojë aksesin në rrënjë dhe të marrë kërkesat</string>
|
||||
<string name="user_independent_summary">Çdo përdorues ka rregullat e veta të veçanta rrënjësore</string>
|
||||
<string name="mount_namespace_mode">Mënyra e Montimit të Hapësirës Emërore</string>
|
||||
<string name="settings_ns_global">Hapësira globale e emrave</string>
|
||||
<string name="settings_ns_requester">Trashëgoni hapësirën e emrave</string>
|
||||
<string name="settings_ns_isolate">Hapësira e izoluar e emrave</string>
|
||||
<string name="global_summary">Të gjitha sesionet rrënjë përdorin hapësirën globale të emrave të montimit</string>
|
||||
<string name="requester_summary">Seancat rrënjësore do të trashëgojnë hapësirën e emrave të kërkuesit të tyre</string>
|
||||
<string name="isolate_summary">Çdo sesion rrënjë do të ketë hapësirën e vet të izoluar të emrave</string>
|
||||
<string name="settings_su_auth_title">Vërtetimi i përdoruesit</string>
|
||||
<string name="settings_su_auth_summary">Kërkoni vërtetimin e përdoruesit gjatë kërkesave të Superpërdoruesit</string>
|
||||
<string name="settings_su_auth_insecure">Asnjë metodë vërtetimi nuk është konfiguruar në pajisje</string>
|
||||
|
||||
<string name="settings_doh_description">Zgjidhje për helmimin e DNS në disa shtete</string>
|
||||
<string name="settings_random_name_title">Emër i rastësishëm</string>
|
||||
<string name="settings_random_name_description">Rastësizo emrin e skedarit të daljes për imazhet e patch-uara dhe skedarët tar për të shmangur detektimin</string>
|
||||
<string name="multiuser_mode">Mënyra multi-përdorues</string>
|
||||
<string name="settings_owner_only">Vetëm pronari i pajisjes</string>
|
||||
<string name="settings_owner_manage">Menaxhuar nga pronari</string>
|
||||
<string name="settings_user_independent">I pavarur për përdoruesit</string>
|
||||
<string name="owner_only_summary">Vetëm pronari ka akses root</string>
|
||||
<string name="owner_manage_summary">Vetëm pronari mund të menaxhojë aksesin root dhe të marrë kërkesat</string>
|
||||
<string name="user_independent_summary">Çdo përdorues ka rregullat e veta të root</string>
|
||||
<string name="mount_namespace_mode">Mënyra e mount namespace</string>
|
||||
<string name="settings_ns_global">Namespace global</string>
|
||||
<string name="settings_ns_requester">Trashëgo namespace</string>
|
||||
<string name="settings_ns_isolate">Namespace i izoluar</string>
|
||||
<string name="global_summary">Të gjitha sesionet root përdorin namespace global</string>
|
||||
<string name="requester_summary">Sesioni root trashëgon namespace-in e kërkuesit</string>
|
||||
<string name="isolate_summary">Çdo sesion root do të ketë namespace të izoluar</string>
|
||||
|
||||

<!--Notifications-->
<string name="update_channel">Përditësimet e magisk</string>
<string name="updated_channel">Përditësimi përfundoi</string>
<string name="update_channel">Përditësimet e Magisk</string>
<string name="progress_channel">Njoftimet e progresit</string>
<string name="updated_channel">Përditësimi përfundoi</string>
<string name="download_complete">Shkarkimi përfundoi</string>
<string name="download_file_error">Gabim në shkarkimin e skedarit</string>
<string name="magisk_update_title">Përditësimi Magisk i disponueshëm!</string>
<string name="download_file_error">Gabim gjatë shkarkimit të skedarit</string>
<string name="magisk_update_title">Përditësim i ri i Magisk!</string>
<string name="updated_title">Magisk u përditësua</string>
<string name="updated_text">Prekni për të hapur aplikacionin</string>
<string name="updated_text">Shtypni për të hapur aplikacionin</string>

<!--Toasts, Dialogs-->
<string name="yes">Po</string>
<string name="no">Jo</string>
<string name="repo_install_title">Instalo %1$s %2$s(%3$d)</string>
<string name="download">Shkarko</string>
<string name="reboot">Rinis</string>
<string name="close">Mbylle</string>
<string name="release_notes">Shënimet e lëshimit</string>
<string name="flashing">Duke flashuar…</string>
<string name="running">Duke vepruar...</string>
<string name="reboot">Rinise</string>
<string name="close">Mbyll</string>
<string name="release_notes">Shënimet e versionit</string>
<string name="flashing">Duke flashuar..</string>
<string name="running">Duke u ekzekutuar..</string>
<string name="done">U krye!</string>
<string name="done_action">Veprimi i ekzekutimit të %1$s u krye</string>
<string name="done_action">Veprimi i %1$s u krye</string>
<string name="failure">Dështoi!</string>
<string name="hide_app_title">Fshehja e aplikacionit Magisk…</string>
<string name="hide_app_title">Duke fshehur aplikacionin Magisk..</string>
<string name="open_link_failed_toast">Nuk u gjet asnjë aplikacion për të hapur lidhjen</string>
<string name="complete_uninstall">Çinstalimi i plotë</string>
<string name="restore_img">Rivendosni imazhet</string>
<string name="restore_img_msg">Duke rivendosur…</string>
<string name="restore_done">Rivendosja u krye!</string>
<string name="restore_fail">Rezervimi i aksioneve nuk ekziston!</string>
<string name="complete_uninstall">Çinstalim i plotë</string>
<string name="restore_img">Rikthe imazhet</string>
<string name="restore_img_msg">Duke rikthyer..</string>
<string name="restore_done">Rikthimi u krye!</string>
<string name="restore_fail">Backup-i origjinal nuk ekziston!</string>
<string name="setup_fail">Konfigurimi dështoi</string>
<string name="env_fix_title">Kërkon Konfigurim shtesë</string>
<string name="env_fix_msg">Pajisja juaj ka nevojë për konfigurim shtesë që Magisk të funksionojë siç duhet. Dëshironi të vazhdoni dhe rindizni?</string>
<string name="env_full_fix_msg">Pajisja juaj ka nevojë për re-flashuar Magisk që të funksionojë siç duhet. Ju lutemi ri-instaloni Magisk brenda aplikacionit, modaliteti i rikuperimit nuk mund të marrë informacionin e saktë të pajisjes.</string>
<string name="setup_msg">Konfigurimi i mjedisit të funksionimit…</string>
<string name="unsupport_magisk_title">Version Magjik i Pambështetur</string>
<string name="unsupport_magisk_msg">Ky version i aplikacionit nuk e mbështet versionin Magisk më të ulët se %1$s.\n\nAplikacioni do të sillet sikur të mos jetë i instaluar Magisk, ju lutemi azhurnoni Magisk sa më shpejt të jetë e mundur.</string>
<string name="env_fix_title">Kërkohet konfigurim shtesë</string>
<string name="env_fix_msg">Pajisja ka nevojë për konfigurim shtesë që Magisk të funksionojë si duhet. Dëshironi të vazhdoni dhe të rinisni pajisjen?</string>
<string name="env_full_fix_msg">Pajisja ka nevojë për ri-flash të Magisk për të funksionuar saktë. Ju lutemi riinstaloni Magisk brenda aplikacionit; Recovery nuk mund të marrë informacionet e sakta të pajisjes.</string>
<string name="setup_msg">Duke ekzekutuar konfigurimin e mjedisit..</string>
<string name="unsupport_magisk_title">Version i Magisk i pambështetur</string>
<string name="unsupport_magisk_msg">Ky version i aplikacionit nuk mbështet versione të Magisk më të ulëta se %1$s.

Aplikacioni do të sillet sikur Magisk nuk është i instaluar. Ju lutemi përditësoni Magisk sa më shpejt të jetë e mundur.</string>
<string name="unsupport_general_title">Gjendje jonormale</string>
<string name="unsupport_system_app_msg">Drejtimi i këtij aplikacioni si një aplikacion sistemi nuk mbështetet. Ju lutemi kthejeni aplikacionin në një aplikacion përdoruesi.</string>
<string name="unsupport_other_su_msg">Një komandë \"su"\ që nuk i përket Magisk është zbuluar. Ju lutemi hiqni SU-në tjetër të pambështetur.</string>
|
||||
<string name="unsupport_external_storage_msg">Magisk është instaluar në ruajtjen e jashtme. Ju lutemi zhvendosni aplikacionin në ruajtjen e brendshme.</string>
|
||||
<string name="unsupport_nonroot_stub_msg">Aplikacioni nuk mund të vazhdojë të punojë në gjendjen e fshehur pasi rrënja ishte e humbur. Ju lutemi rivendoseni përsëri në APK-në origjinale.</string>
|
||||
<string name="unsupport_system_app_msg">Ekzekutimi i këtij aplikacioni si aplikacion sistemi nuk mbështetet. Ju lutemi kthejeni në aplikacion përdoruesi.</string>
|
||||
<string name="unsupport_other_su_msg">Është zbuluar një binar "su" që nuk është nga Magisk. Ju lutemi hiqni çdo zgjidhje tjetër root dhe/ose riinstaloni Magisk.</string>
|
||||
<string name="unsupport_external_storage_msg">Magisk është instaluar në memorien e jashtme. Lëvizni aplikacionin në memorien e brendshme.</string>
|
||||
<string name="unsupport_nonroot_stub_msg">Aplikacioni i fshehur i Magisk nuk mund të vazhdojë të funksionojë sepse root u humb. Ju lutemi riktheni APK-në origjinale.</string>
|
||||
<string name="unsupport_nonroot_stub_title">@string/settings_restore_app_title</string>
|
||||
<string name="external_rw_permission_denied">Jepni lejen e ruajtjes për të aktivizuar këtë funksion</string>
|
||||
<string name="external_rw_permission_denied">Jepni lejen e magazinimit për të aktivizuar këtë funksion</string>
|
||||
<string name="post_notifications_denied">Jepni lejen e njoftimeve për të aktivizuar këtë funksion</string>
|
||||
<string name="install_unknown_denied">Lejo "instaloni aplikacione të panjohura" për të aktivizuar këtë funksion</string>
|
||||
<string name="install_unknown_denied">Lejoni "Instalo aplikacione të panjohura" për të aktivizuar këtë funksion</string>
|
||||
<string name="add_shortcut_title">Shto shkurtore në ekranin bazë</string>
|
||||
<string name="add_shortcut_msg">Pas fshehjes së këtij aplikacioni, emri dhe ikona e tij mund të bëhen të vështira për tu njohur. Dëshironi të shtoni një shkurtore mjaft të bukur në ekranin bazë?</string>
|
||||
<string name="app_not_found">Asnjë aplikacion nuk u gjet për të trajtuar këtë veprim</string>
|
||||
<string name="add_shortcut_msg">Pas fshehjes së aplikacionit, emri dhe ikona mund të jenë të vështira për t’u njohur. Dëshironi të shtoni një shkurtore të bukur në ekranin bazë?</string>
|
||||
<string name="app_not_found">Nuk u gjet aplikacion për të kryer këtë veprim</string>
|
||||
<string name="reboot_apply_change">Rinisni për të aplikuar ndryshimet</string>
|
||||
<string name="restore_app_confirmation">Kjo do të rivendosë aplikacionin e fshehur në aplikacionin origjinal. A dëshironi vërtet ta bëni këtë?</string>
|
||||
<string name="restore_app_confirmation">Kjo do të rikthejë aplikacionin e fshehur në gjendjen origjinale. Jeni të sigurt që dëshironi ta bëni këtë?</string>
|
||||
|
||||
</resources>
|
||||
|
||||
@@ -24,10 +24,8 @@ org.gradle.caching=true
kapt.use.k2=true

# Android
android.useAndroidX=true
android.injected.testOnly=false
android.nonFinalResIds=false

# Magisk
magisk.stubVersion=40
magisk.versionCode=30400
magisk.versionCode=30600

@@ -1,16 +1,16 @@
[versions]
kotlin = "2.2.20"
android = "8.13.0"
ksp = "2.2.20-2.0.2"
kotlin = "2.3.0"
android = "9.0.0"
ksp = "2.3.4"
rikka = "1.3.0"
navigation = "2.9.4"
navigation = "2.9.6"
libsu = "6.0.0"
okhttp = "5.1.0"
okhttp = "5.3.2"
retrofit = "3.0.0"
room = "2.8.0"
room = "2.8.4"

[libraries]
bcpkix = { module = "org.bouncycastle:bcpkix-jdk18on", version = "1.82" }
bcpkix = { module = "org.bouncycastle:bcpkix-jdk18on", version = "1.83" }
commons-compress = { module = "org.apache.commons:commons-compress", version = "1.28.0" }
retrofit = { module = "com.squareup.retrofit2:retrofit", version.ref = "retrofit" }
retrofit-moshi = { module = "com.squareup.retrofit2:converter-moshi", version.ref = "retrofit" }
@@ -23,10 +23,10 @@ timber = { module = "com.jakewharton.timber:timber", version = "5.0.1" }
jgit = { module = "org.eclipse.jgit:org.eclipse.jgit", version = "7.1.0.202411261347-r" }

# AndroidX
activity = { module = "androidx.activity:activity", version = "1.11.0" }
activity = { module = "androidx.activity:activity", version = "1.12.2" }
appcompat = { module = "androidx.appcompat:appcompat", version = "1.7.1" }
core-ktx = { module = "androidx.core:core-ktx", version = "1.17.0" }
core-splashscreen = { module = "androidx.core:core-splashscreen", version = "1.0.1" }
core-splashscreen = { module = "androidx.core:core-splashscreen", version = "1.2.0" }
constraintlayout = { module = "androidx.constraintlayout:constraintlayout", version = "2.2.1" }
fragment-ktx = { module = "androidx.fragment:fragment-ktx", version = "1.8.9" }
navigation-fragment-ktx = { module = "androidx.navigation:navigation-fragment-ktx", version.ref = "navigation" }
@@ -36,8 +36,8 @@ recyclerview = { module = "androidx.recyclerview:recyclerview", version = "1.4.0
room-ktx = { module = "androidx.room:room-ktx", version.ref = "room" }
room-runtime = { module = "androidx.room:room-runtime", version.ref = "room" }
room-compiler = { module = "androidx.room:room-compiler", version.ref = "room" }
swiperefreshlayout = { module = "androidx.swiperefreshlayout:swiperefreshlayout", version = "1.1.0" }
transition = { module = "androidx.transition:transition", version = "1.6.0" }
swiperefreshlayout = { module = "androidx.swiperefreshlayout:swiperefreshlayout", version = "1.2.0" }
transition = { module = "androidx.transition:transition", version = "1.7.0" }
collection-ktx = { module = "androidx.collection:collection-ktx", version = "1.5.0" }
material = { module = "com.google.android.material:material", version = "1.13.0" }
jdk-libs = { module = "com.android.tools:desugar_jdk_libs_nio", version = "2.1.5" }
@@ -59,9 +59,10 @@ rikka-insets = { module = "dev.rikka.rikkax.insets:insets", version.ref = "rikka

# Build plugins
android-gradle-plugin = { module = "com.android.tools.build:gradle", version.ref = "android" }
android-kapt-plugin = { module = "com.android.legacy-kapt:com.android.legacy-kapt.gradle.plugin", version.ref = "android" }
ksp-plugin = { module = "com.google.devtools.ksp:com.google.devtools.ksp.gradle.plugin", version.ref = "ksp" }
navigation-safe-args-plugin = { module = "androidx.navigation:navigation-safe-args-gradle-plugin", version.ref = "navigation" }
lsparanoid-plugin = { module = "org.lsposed.lsparanoid:gradle-plugin", version = "0.6.0" }
moshi-plugin = { module = "dev.zacsweers.moshix:dev.zacsweers.moshix.gradle.plugin", version = "0.32.0" }
moshi-plugin = { module = "dev.zacsweers.moshix:dev.zacsweers.moshix.gradle.plugin", version = "0.34.2" }

[plugins]
2
app/gradle/wrapper/gradle-wrapper.properties
vendored
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-9.0.0-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-9.3.0-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME

1
app/shared/.gitignore
vendored
@@ -1 +0,0 @@
/build
@@ -6,4 +6,5 @@ setupCommon()

android {
namespace = "com.topjohnwu.shared"
enableKotlin = false
}

@@ -1,6 +1,5 @@
plugins {
id("com.android.application")
kotlin("android")
}

android {

3
build.py
@@ -80,7 +80,7 @@ support_targets = {"magisk", "magiskinit", "magiskboot", "magiskpolicy", "resetp
default_targets = support_targets - {"resetprop"}
rust_targets = default_targets.copy()
clean_targets = {"native", "cpp", "rust", "app"}
ondk_version = "r29.2"
ondk_version = "r29.4"

# Global vars
config = {}
@@ -406,6 +406,7 @@ def build_apk(module: str):
gradlew,
f"{module}:assemble{build_type}",
f"-PconfigPath={props}",
f"-PabiList={','.join(build_abis.keys())}",
],
env=env,
)

@@ -1,5 +1,14 @@
# Magisk Changelog

### v30.6 (2025.12.1)

- [MagiskInit] Revert a change that could result in bootloops

### v30.5 (2025.12.1)

- [General] Improve commandline argument parsing logic
- [resetprop] Properly support Android versions with property overrides

### v30.4 (2025.10.2)

- [MagiskSU] Fix several implementation bugs

@@ -3,10 +3,20 @@
# The actual compilation will have the target overriden by command-line.
target = "aarch64-linux-android"
# Enable cross language LTO, and explicitly set dwarf-version for ThinLTO
rustflags = ["-Z", "dwarf-version=4", "-C", "linker-plugin-lto"]
rustflags = [
"-Z",
"dwarf-version=4",
"-C",
"linker-plugin-lto",
"-C",
"force-unwind-tables=no",
]
target-dir = "../out/rust"

[unstable]
build-std = ["std", "panic_abort"]
build-std-features = ["panic_immediate_abort", "optimize_for_size"]
build-std-features = ["optimize_for_size"]
profile-rustflags = true

[profile.release]
rustflags = ["-Z", "location-detail=none", "-Z", "fmt-debug=none"]

429
native/src/Cargo.lock
generated
File diff suppressed because it is too large
@@ -1,6 +1,8 @@
|
||||
cargo-features = ["panic-immediate-abort"]
|
||||
|
||||
[workspace]
|
||||
exclude = ["external"]
|
||||
members = ["base", "boot", "core", "core/derive", "init", "sepolicy"]
|
||||
members = ["base", "base/derive", "boot", "core", "init", "sepolicy"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
@@ -8,57 +10,63 @@ version = "0.0.0"
|
||||
edition = "2024"
|
||||
|
||||
[workspace.dependencies]
|
||||
base = { path = "base" }
|
||||
derive = { path = "base/derive" }
|
||||
magiskpolicy = { path = "sepolicy" }
|
||||
cxx = { path = "external/cxx-rs" }
|
||||
cxx-gen = { path = "external/cxx-rs/gen/lib" }
|
||||
libc = "0.2.176"
|
||||
cfg-if = "1.0.3"
|
||||
libc = "0.2.178"
|
||||
cfg-if = "1.0.4"
|
||||
num-traits = "0.2.19"
|
||||
num-derive = "0.4.2"
|
||||
thiserror = "2.0.16"
|
||||
thiserror = "2.0.17"
|
||||
byteorder = "1.5.0"
|
||||
size = "0.5.0"
|
||||
bytemuck = "1.23.2"
|
||||
bytemuck = "1.24.0"
|
||||
fdt = "0.1.5"
|
||||
const_format = "0.2.34"
|
||||
const_format = "0.2.35"
|
||||
bit-set = "0.8.0"
|
||||
syn = "2.0.106"
|
||||
quote = "1.0.40"
|
||||
proc-macro2 = "1.0.101"
|
||||
argh = { version = "0.1.13", default-features = false }
|
||||
syn = "2.0.111"
|
||||
quote = "1.0.42"
|
||||
proc-macro2 = "1.0.103"
|
||||
pb-rs = { version = "0.10.0", default-features = false }
|
||||
quick-protobuf = "0.8.1"
|
||||
flate2 = { version = "1.1.2", default-features = false }
|
||||
bzip2 = "0.6.0"
|
||||
zopfli = "0.8.2"
|
||||
flate2 = { version = "1.1.5", default-features = false }
|
||||
bzip2 = "0.6.1"
|
||||
zopfli = "0.8.3"
|
||||
lz4 = "1.28.1"
|
||||
lzma-rust2 = { version = "0.14.2", default-features = false }
|
||||
lzma-rust2 = { version = "0.15.4", default-features = false }
|
||||
nix = "0.30.1"
|
||||
bitflags = "2.9.4"
|
||||
bitflags = "2.10.0"
|
||||
|
||||
# Rust crypto crates are tied together
|
||||
sha1 = "0.11.0-rc.2"
|
||||
sha2 = "0.11.0-rc.2"
|
||||
digest = "0.11.0-rc.2"
|
||||
p256 = "0.14.0-pre.11"
|
||||
p384 = "0.14.0-pre.11"
|
||||
p521 = "0.14.0-pre.11"
|
||||
rsa = "0.10.0-rc.8"
|
||||
sha1 = "0.11.0-rc.3"
|
||||
sha2 = "0.11.0-rc.3"
|
||||
digest = "0.11.0-rc.4"
|
||||
p256 = "0.14.0-rc.1"
|
||||
p384 = "0.14.0-rc.1"
|
||||
p521 = "0.14.0-rc.1"
|
||||
rsa = "0.10.0-rc.10"
|
||||
x509-cert = "0.3.0-rc.2"
|
||||
der = "0.8.0-rc.9"
|
||||
der = "0.8.0-rc.10"
|
||||
|
||||
[patch.crates-io]
|
||||
pb-rs = { git = "https://github.com/tafia/quick-protobuf.git" }
|
||||
quick-protobuf = { git = "https://github.com/tafia/quick-protobuf.git" }
|
||||
pb-rs = { git = "https://github.com/topjohnwu/quick-protobuf.git" }
|
||||
quick-protobuf = { git = "https://github.com/topjohnwu/quick-protobuf.git" }
|
||||
lz4-sys = { path = "external/lz4-sys" }
|
||||
|
||||
[workspace.lints.clippy]
|
||||
unwrap_used = "deny"
|
||||
|
||||
[profile.dev]
|
||||
opt-level = "z"
|
||||
lto = "thin"
|
||||
panic = "abort"
|
||||
panic = "immediate-abort"
|
||||
debug = "none"
|
||||
|
||||
[profile.release]
|
||||
opt-level = "z"
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
panic = "abort"
|
||||
panic = "immediate-abort"
|
||||
strip = true
|
||||
|
||||
@@ -9,15 +9,18 @@ path = "lib.rs"
|
||||
[features]
|
||||
selinux = []
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
cxx-gen = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
derive = { workspace = true }
|
||||
cxx = { workspace = true }
|
||||
libc = { workspace = true }
|
||||
cfg-if = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
argh = { workspace = true }
|
||||
bytemuck = { workspace = true }
|
||||
num-traits = { workspace = true }
|
||||
num-derive = { workspace = true }
|
||||
|
||||
1226
native/src/base/argh.rs
Normal file
File diff suppressed because it is too large
@@ -1,13 +1,9 @@
|
||||
#include <sys/types.h>
|
||||
#include <sys/wait.h>
|
||||
#include <sys/prctl.h>
|
||||
#include <sys/mman.h>
|
||||
#include <android/log.h>
|
||||
#include <fcntl.h>
|
||||
#include <unistd.h>
|
||||
#include <linux/fs.h>
|
||||
#include <syscall.h>
|
||||
#include <random>
|
||||
#include <string>
|
||||
|
||||
#include <base.hpp>
|
||||
#include <flags.h>
|
||||
@@ -18,6 +14,12 @@ using namespace std;
|
||||
#define __call_bypassing_fortify(fn) (&fn)
|
||||
#endif
|
||||
|
||||
#ifdef __LP64__
|
||||
static_assert(BLKGETSIZE64 == 0x80081272);
|
||||
#else
|
||||
static_assert(BLKGETSIZE64 == 0x80041272);
|
||||
#endif
|
||||
|
||||
// Override libc++ new implementation to optimize final build size
|
||||
|
||||
void* operator new(std::size_t s) { return std::malloc(s); }
|
||||
@@ -29,32 +31,19 @@ void* operator new[](std::size_t s, const std::nothrow_t&) noexcept { return std
|
||||
void operator delete(void *p, const std::nothrow_t&) noexcept { std::free(p); }
|
||||
void operator delete[](void *p, const std::nothrow_t&) noexcept { std::free(p); }
|
||||
|
||||
bool byte_view::contains(byte_view pattern) const {
|
||||
return _buf != nullptr && memmem(_buf, _sz, pattern._buf, pattern._sz) != nullptr;
|
||||
}
|
||||
|
||||
bool byte_view::operator==(byte_view rhs) const {
|
||||
return _sz == rhs._sz && memcmp(_buf, rhs._buf, _sz) == 0;
|
||||
}
|
||||
|
||||
void byte_data::swap(byte_data &o) {
|
||||
std::swap(_buf, o._buf);
|
||||
std::swap(_sz, o._sz);
|
||||
}
|
||||
|
||||
rust::Vec<size_t> byte_data::patch(byte_view from, byte_view to) const {
|
||||
rust::Vec<size_t> v;
|
||||
if (_buf == nullptr)
|
||||
if (ptr == nullptr)
|
||||
return v;
|
||||
auto p = _buf;
|
||||
auto eof = _buf + _sz;
|
||||
auto p = ptr;
|
||||
auto eof = ptr + sz;
|
||||
while (p < eof) {
|
||||
p = static_cast<uint8_t *>(memmem(p, eof - p, from.data(), from.size()));
|
||||
if (p == nullptr)
|
||||
return v;
|
||||
memset(p, 0, from.size());
|
||||
memcpy(p, to.data(), to.size());
|
||||
v.push_back(p - _buf);
|
||||
v.push_back(p - ptr);
|
||||
p += from.size();
|
||||
}
|
||||
return v;
|
||||
@@ -374,30 +363,34 @@ sFILE make_file(FILE *fp) {
|
||||
mmap_data::mmap_data(const char *name, bool rw) {
|
||||
auto slice = rust::map_file(name, rw);
|
||||
if (!slice.empty()) {
|
||||
_buf = slice.data();
|
||||
_sz = slice.size();
|
||||
this->ptr = slice.data();
|
||||
this->sz = slice.size();
|
||||
}
|
||||
}
|
||||
|
||||
mmap_data::mmap_data(int dirfd, const char *name, bool rw) {
|
||||
auto slice = rust::map_file_at(dirfd, name, rw);
|
||||
if (!slice.empty()) {
|
||||
_buf = slice.data();
|
||||
_sz = slice.size();
|
||||
this->ptr = slice.data();
|
||||
this->sz = slice.size();
|
||||
}
|
||||
}
|
||||
|
||||
mmap_data::mmap_data(int fd, size_t sz, bool rw) {
|
||||
auto slice = rust::map_fd(fd, sz, rw);
|
||||
if (!slice.empty()) {
|
||||
_buf = slice.data();
|
||||
_sz = slice.size();
|
||||
this->ptr = slice.data();
|
||||
this->sz = slice.size();
|
||||
}
|
||||
}
|
||||
|
||||
mmap_data::~mmap_data() {
|
||||
if (_buf)
|
||||
munmap(_buf, _sz);
|
||||
if (ptr) munmap(ptr, sz);
|
||||
}
|
||||
|
||||
void mmap_data::swap(mmap_data &o) {
|
||||
std::swap(ptr, o.ptr);
|
||||
std::swap(sz, o.sz);
|
||||
}
|
||||
|
||||
string resolve_preinit_dir(const char *base_dir) {
|
||||
@@ -420,7 +413,7 @@ extern "C" void cxx$utf8str$new(Utf8CStr *self, const void *s, size_t len);
|
||||
extern "C" const char *cxx$utf8str$ptr(const Utf8CStr *self);
|
||||
extern "C" size_t cxx$utf8str$len(const Utf8CStr *self);
|
||||
|
||||
Utf8CStr::Utf8CStr(const char *s, size_t len) {
|
||||
Utf8CStr::Utf8CStr(const char *s, size_t len) : repr{} {
|
||||
cxx$utf8str$new(this, s, len);
|
||||
}
|
||||
|
||||
|
||||
@@ -4,14 +4,14 @@ use std::fs::File;
|
||||
use std::io::BufReader;
|
||||
use std::mem::ManuallyDrop;
|
||||
use std::ops::DerefMut;
|
||||
use std::os::fd::{BorrowedFd, FromRawFd, OwnedFd, RawFd};
|
||||
use std::os::fd::{BorrowedFd, FromRawFd, RawFd};
|
||||
|
||||
use crate::ffi::{FnBoolStr, FnBoolStrStr};
|
||||
use crate::files::map_file_at;
|
||||
pub(crate) use crate::xwrap::*;
|
||||
use crate::{
|
||||
BufReadExt, Directory, LoggedResult, ResultExt, Utf8CStr, clone_attr, cstr, fclone_attr,
|
||||
map_fd, map_file, slice_from_ptr,
|
||||
BufReadExt, ResultExt, Utf8CStr, clone_attr, cstr, fclone_attr, map_fd, map_file,
|
||||
slice_from_ptr,
|
||||
};
|
||||
use cfg_if::cfg_if;
|
||||
use libc::{c_char, mode_t};
|
||||
@@ -52,14 +52,6 @@ unsafe extern "C" fn rm_rf_for_cxx(path: *const c_char) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
unsafe extern "C" fn frm_rf(fd: OwnedFd) -> bool {
|
||||
fn inner(fd: OwnedFd) -> LoggedResult<()> {
|
||||
Directory::try_from(fd)?.remove_all()
|
||||
}
|
||||
inner(fd).is_ok()
|
||||
}
|
||||
|
||||
pub(crate) fn map_file_for_cxx(path: &Utf8CStr, rw: bool) -> &'static mut [u8] {
|
||||
map_file(path, rw).log().unwrap_or(&mut [])
|
||||
}
|
||||
|
||||
185
native/src/base/derive/argh/errors.rs
Normal file
@@ -0,0 +1,185 @@
|
||||
// Copyright (c) 2020 Google LLC All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::ToTokens;
|
||||
use std::cell::RefCell;
|
||||
|
||||
/// A type for collecting procedural macro errors.
|
||||
#[derive(Default)]
|
||||
pub struct Errors {
|
||||
errors: RefCell<Vec<syn::Error>>,
|
||||
}
|
||||
|
||||
/// Produce functions to expect particular literals in `syn::Expr`
|
||||
macro_rules! expect_lit_fn {
|
||||
($(($fn_name:ident, $syn_type:ident, $variant:ident, $lit_name:literal),)*) => {
|
||||
$(
|
||||
pub fn $fn_name<'a>(&self, e: &'a syn::Expr) -> Option<&'a syn::$syn_type> {
|
||||
if let syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::$variant(inner), .. }) = e {
|
||||
Some(inner)
|
||||
} else {
|
||||
self.unexpected_lit($lit_name, e);
|
||||
None
|
||||
}
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
/// Produce functions to expect particular variants of `syn::Meta`
|
||||
macro_rules! expect_meta_fn {
|
||||
($(($fn_name:ident, $syn_type:ident, $variant:ident, $meta_name:literal),)*) => {
|
||||
$(
|
||||
pub fn $fn_name<'a>(&self, meta: &'a syn::Meta) -> Option<&'a syn::$syn_type> {
|
||||
if let syn::Meta::$variant(inner) = meta {
|
||||
Some(inner)
|
||||
} else {
|
||||
self.unexpected_meta($meta_name, meta);
|
||||
None
|
||||
}
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
impl Errors {
|
||||
/// Issue an error like:
|
||||
///
|
||||
/// Duplicate foo attribute
|
||||
/// First foo attribute here
|
||||
pub fn duplicate_attrs(
|
||||
&self,
|
||||
attr_kind: &str,
|
||||
first: &impl syn::spanned::Spanned,
|
||||
second: &impl syn::spanned::Spanned,
|
||||
) {
|
||||
self.duplicate_attrs_inner(attr_kind, first.span(), second.span())
|
||||
}
|
||||
|
||||
fn duplicate_attrs_inner(&self, attr_kind: &str, first: Span, second: Span) {
|
||||
self.err_span(second, &["Duplicate ", attr_kind, " attribute"].concat());
|
||||
self.err_span(first, &["First ", attr_kind, " attribute here"].concat());
|
||||
}
|
||||
|
||||
expect_lit_fn![
|
||||
(expect_lit_str, LitStr, Str, "string"),
|
||||
(expect_lit_char, LitChar, Char, "character"),
|
||||
(expect_lit_int, LitInt, Int, "integer"),
|
||||
];
|
||||
|
||||
expect_meta_fn![
|
||||
(expect_meta_word, Path, Path, "path"),
|
||||
(expect_meta_list, MetaList, List, "list"),
|
||||
(
|
||||
expect_meta_name_value,
|
||||
MetaNameValue,
|
||||
NameValue,
|
||||
"name-value pair"
|
||||
),
|
||||
];
|
||||
|
||||
fn unexpected_lit(&self, expected: &str, found: &syn::Expr) {
|
||||
fn lit_kind(lit: &syn::Lit) -> &'static str {
|
||||
use syn::Lit::{Bool, Byte, ByteStr, Char, Float, Int, Str, Verbatim};
|
||||
match lit {
|
||||
Str(_) => "string",
|
||||
ByteStr(_) => "bytestring",
|
||||
Byte(_) => "byte",
|
||||
Char(_) => "character",
|
||||
Int(_) => "integer",
|
||||
Float(_) => "float",
|
||||
Bool(_) => "boolean",
|
||||
Verbatim(_) => "unknown (possibly extra-large integer)",
|
||||
_ => "unknown literal kind",
|
||||
}
|
||||
}
|
||||
|
||||
if let syn::Expr::Lit(syn::ExprLit { lit, .. }) = found {
|
||||
self.err(
|
||||
found,
|
||||
&[
|
||||
"Expected ",
|
||||
expected,
|
||||
" literal, found ",
|
||||
lit_kind(lit),
|
||||
" literal",
|
||||
]
|
||||
.concat(),
|
||||
)
|
||||
} else {
|
||||
self.err(
|
||||
found,
|
||||
&[
|
||||
"Expected ",
|
||||
expected,
|
||||
" literal, found non-literal expression.",
|
||||
]
|
||||
.concat(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn unexpected_meta(&self, expected: &str, found: &syn::Meta) {
|
||||
fn meta_kind(meta: &syn::Meta) -> &'static str {
|
||||
use syn::Meta::{List, NameValue, Path};
|
||||
match meta {
|
||||
Path(_) => "path",
|
||||
List(_) => "list",
|
||||
NameValue(_) => "name-value pair",
|
||||
}
|
||||
}
|
||||
|
||||
self.err(
|
||||
found,
|
||||
&[
|
||||
"Expected ",
|
||||
expected,
|
||||
" attribute, found ",
|
||||
meta_kind(found),
|
||||
" attribute",
|
||||
]
|
||||
.concat(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Issue an error relating to a particular `Spanned` structure.
|
||||
pub fn err(&self, spanned: &impl syn::spanned::Spanned, msg: &str) {
|
||||
self.err_span(spanned.span(), msg);
|
||||
}
|
||||
|
||||
/// Issue an error relating to a particular `Span`.
|
||||
pub fn err_span(&self, span: Span, msg: &str) {
|
||||
self.push(syn::Error::new(span, msg));
|
||||
}
|
||||
|
||||
/// Issue an error spanning over the given syntax tree node.
|
||||
pub fn err_span_tokens<T: ToTokens>(&self, tokens: T, msg: &str) {
|
||||
self.push(syn::Error::new_spanned(tokens, msg));
|
||||
}
|
||||
|
||||
/// Push a `syn::Error` onto the list of errors to issue.
|
||||
pub fn push(&self, err: syn::Error) {
|
||||
self.errors.borrow_mut().push(err);
|
||||
}
|
||||
|
||||
/// Convert a `syn::Result` to an `Option`, logging the error if present.
|
||||
pub fn ok<T>(&self, r: syn::Result<T>) -> Option<T> {
|
||||
match r {
|
||||
Ok(v) => Some(v),
|
||||
Err(e) => {
|
||||
self.push(e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Errors {
|
||||
/// Convert the errors into tokens that, when emit, will cause
|
||||
/// the user of the macro to receive compiler errors.
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.extend(self.errors.borrow().iter().map(|e| e.to_compile_error()));
|
||||
}
|
||||
}
|
||||
912
native/src/base/derive/argh/mod.rs
Normal file
@@ -0,0 +1,912 @@
|
||||
// Copyright (c) 2020 Google LLC All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
use syn::ext::IdentExt as _;
|
||||
|
||||
/// Implementation of the `FromArgs` and `argh(...)` derive attributes.
|
||||
///
|
||||
/// For more thorough documentation, see the `argh` crate itself.
|
||||
extern crate proc_macro;
|
||||
|
||||
use errors::Errors;
|
||||
use parse_attrs::{FieldAttrs, FieldKind, TypeAttrs, check_long_name};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, quote, quote_spanned};
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
use syn::spanned::Spanned;
|
||||
use syn::{GenericArgument, LitStr, PathArguments, Type};
|
||||
|
||||
mod errors;
|
||||
mod parse_attrs;
|
||||
|
||||
/// Transform the input into a token stream containing any generated implementations,
|
||||
/// as well as all errors that occurred.
|
||||
pub(crate) fn impl_from_args(input: &syn::DeriveInput) -> TokenStream {
|
||||
let errors = &Errors::default();
|
||||
let type_attrs = &TypeAttrs::parse(errors, input);
|
||||
let mut output_tokens = match &input.data {
|
||||
syn::Data::Struct(ds) => {
|
||||
impl_from_args_struct(errors, &input.ident, type_attrs, &input.generics, ds)
|
||||
}
|
||||
syn::Data::Enum(de) => {
|
||||
impl_from_args_enum(errors, &input.ident, type_attrs, &input.generics, de)
|
||||
}
|
||||
syn::Data::Union(_) => {
|
||||
errors.err(input, "`#[derive(FromArgs)]` cannot be applied to unions");
|
||||
TokenStream::new()
|
||||
}
|
||||
};
|
||||
errors.to_tokens(&mut output_tokens);
|
||||
output_tokens
|
||||
}
|
||||
|
||||
/// The kind of optionality a parameter has.
|
||||
enum Optionality {
|
||||
None,
|
||||
Defaulted(TokenStream),
|
||||
Optional,
|
||||
Repeating,
|
||||
DefaultedRepeating(TokenStream),
|
||||
}
|
||||
|
||||
impl PartialEq<Optionality> for Optionality {
|
||||
fn eq(&self, other: &Optionality) -> bool {
|
||||
use Optionality::*;
|
||||
// NB: (Defaulted, Defaulted) can't contain the same token streams
|
||||
matches!((self, other), (Optional, Optional) | (Repeating, Repeating))
|
||||
}
|
||||
}
|
||||
|
||||
impl Optionality {
|
||||
/// Whether or not this is `Optionality::None`
|
||||
fn is_required(&self) -> bool {
|
||||
matches!(self, Optionality::None)
|
||||
}
|
||||
}
|
||||
|
||||
/// A field of a `#![derive(FromArgs)]` struct with attributes and some other
|
||||
/// notable metadata appended.
|
||||
struct StructField<'a> {
|
||||
/// The original parsed field
|
||||
field: &'a syn::Field,
|
||||
/// The parsed attributes of the field
|
||||
attrs: FieldAttrs,
|
||||
/// The field name. This is contained optionally inside `field`,
|
||||
/// but is duplicated non-optionally here to indicate that all field that
|
||||
/// have reached this point must have a field name, and it no longer
|
||||
/// needs to be unwrapped.
|
||||
name: &'a syn::Ident,
|
||||
/// Similar to `name` above, this is contained optionally inside `FieldAttrs`,
|
||||
/// but here is fully present to indicate that we only have to consider fields
|
||||
/// with a valid `kind` at this point.
|
||||
kind: FieldKind,
|
||||
// If `field.ty` is `Vec<T>` or `Option<T>`, this is `T`, otherwise it's `&field.ty`.
|
||||
// This is used to enable consistent parsing code between optional and non-optional
|
||||
// keyed and subcommand fields.
|
||||
ty_without_wrapper: &'a syn::Type,
|
||||
// Whether the field represents an optional value, such as an `Option` subcommand field
|
||||
// or an `Option` or `Vec` keyed argument, or if it has a `default`.
|
||||
optionality: Optionality,
|
||||
// The `--`-prefixed name of the option, if one exists.
|
||||
long_name: Option<String>,
|
||||
}
|
||||
|
||||
impl<'a> StructField<'a> {
|
||||
/// Attempts to parse a field of a `#[derive(FromArgs)]` struct, pulling out the
|
||||
/// fields required for code generation.
|
||||
fn new(errors: &Errors, field: &'a syn::Field, attrs: FieldAttrs) -> Option<Self> {
|
||||
let name = field.ident.as_ref().expect("missing ident for named field");
|
||||
|
||||
// Ensure that one "kind" is present (switch, option, subcommand, positional)
|
||||
let kind = if let Some(field_type) = &attrs.field_type {
|
||||
field_type.kind
|
||||
} else {
|
||||
errors.err(
|
||||
field,
|
||||
concat!(
|
||||
"Missing `argh` field kind attribute.\n",
|
||||
"Expected one of: `switch`, `option`, `remaining`, `subcommand`, `positional`",
|
||||
),
|
||||
);
|
||||
return None;
|
||||
};
|
||||
|
||||
// Parse out whether a field is optional (`Option` or `Vec`).
|
||||
let optionality;
|
||||
let ty_without_wrapper;
|
||||
match kind {
|
||||
FieldKind::Switch => {
|
||||
if !ty_expect_switch(errors, &field.ty) {
|
||||
return None;
|
||||
}
|
||||
optionality = Optionality::Optional;
|
||||
ty_without_wrapper = &field.ty;
|
||||
}
|
||||
FieldKind::Option | FieldKind::Positional => {
|
||||
if let Some(default) = &attrs.default {
|
||||
let tokens = match TokenStream::from_str(&default.value()) {
|
||||
Ok(tokens) => tokens,
|
||||
Err(_) => {
|
||||
errors.err(&default, "Invalid tokens: unable to lex `default` value");
|
||||
return None;
|
||||
}
|
||||
};
|
||||
// Set the span of the generated tokens to the string literal
|
||||
let tokens: TokenStream = tokens
|
||||
.into_iter()
|
||||
.map(|mut tree| {
|
||||
tree.set_span(default.span());
|
||||
tree
|
||||
})
|
||||
.collect();
|
||||
let inner = if let Some(x) = ty_inner(&["Vec"], &field.ty) {
|
||||
optionality = Optionality::DefaultedRepeating(tokens);
|
||||
x
|
||||
} else {
|
||||
optionality = Optionality::Defaulted(tokens);
|
||||
&field.ty
|
||||
};
|
||||
ty_without_wrapper = inner;
|
||||
} else {
|
||||
let mut inner = None;
|
||||
optionality = if let Some(x) = ty_inner(&["Option"], &field.ty) {
|
||||
inner = Some(x);
|
||||
Optionality::Optional
|
||||
} else if let Some(x) = ty_inner(&["Vec"], &field.ty) {
|
||||
inner = Some(x);
|
||||
Optionality::Repeating
|
||||
} else {
|
||||
Optionality::None
|
||||
};
|
||||
ty_without_wrapper = inner.unwrap_or(&field.ty);
|
||||
}
|
||||
}
|
||||
FieldKind::SubCommand => {
|
||||
let inner = ty_inner(&["Option"], &field.ty);
|
||||
optionality = if inner.is_some() {
|
||||
Optionality::Optional
|
||||
} else {
|
||||
Optionality::None
|
||||
};
|
||||
ty_without_wrapper = inner.unwrap_or(&field.ty);
|
||||
}
|
||||
}
|
||||
|
||||
// Determine the "long" name of options and switches.
|
||||
// Defaults to the kebab-cased field name if `#[argh(long = "...")]` is omitted.
|
||||
// If `#[argh(long = none)]` is explicitly set, no long name will be set.
|
||||
let long_name = match kind {
|
||||
FieldKind::Switch | FieldKind::Option => {
|
||||
let long_name = match &attrs.long {
|
||||
None => {
|
||||
let kebab_name = to_kebab_case(&name.unraw().to_string());
|
||||
check_long_name(errors, name, &kebab_name);
|
||||
Some(kebab_name)
|
||||
}
|
||||
Some(None) => None,
|
||||
Some(Some(long)) => Some(long.value()),
|
||||
}
|
||||
.map(|long_name| {
|
||||
if long_name == "help" {
|
||||
errors.err(field, "Custom `--help` flags are not supported.");
|
||||
}
|
||||
format!("--{}", long_name)
|
||||
});
|
||||
if let (None, None) = (&attrs.short, &long_name) {
|
||||
errors.err(field, "At least one of `short` or `long` has to be set.")
|
||||
};
|
||||
long_name
|
||||
}
|
||||
FieldKind::SubCommand | FieldKind::Positional => None,
|
||||
};
|
||||
|
||||
Some(StructField {
|
||||
field,
|
||||
attrs,
|
||||
kind,
|
||||
optionality,
|
||||
ty_without_wrapper,
|
||||
name,
|
||||
long_name,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn positional_arg_name(&self) -> String {
|
||||
self.attrs
|
||||
.arg_name
|
||||
.as_ref()
|
||||
.map(LitStr::value)
|
||||
.unwrap_or_else(|| self.name.to_string().trim_matches('_').to_owned())
|
||||
}
|
||||
|
||||
fn option_arg_name(&self) -> String {
|
||||
match (&self.attrs.short, &self.long_name) {
|
||||
(None, None) => unreachable!("short and long cannot both be None"),
|
||||
(Some(short), None) => format!("-{}", short.value()),
|
||||
(None, Some(long)) => long.clone(),
|
||||
(Some(short), Some(long)) => format!("-{},{long}", short.value()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn to_kebab_case(s: &str) -> String {
|
||||
let words = s.split('_').filter(|word| !word.is_empty());
|
||||
let mut res = String::with_capacity(s.len());
|
||||
for word in words {
|
||||
if !res.is_empty() {
|
||||
res.push('-')
|
||||
}
|
||||
res.push_str(word)
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
/// Implements `FromArgs` and `TopLevelCommand` or `SubCommand` for a `#[derive(FromArgs)]` struct.
|
||||
fn impl_from_args_struct(
|
||||
errors: &Errors,
|
||||
name: &syn::Ident,
|
||||
type_attrs: &TypeAttrs,
|
||||
generic_args: &syn::Generics,
|
||||
ds: &syn::DataStruct,
|
||||
) -> TokenStream {
|
||||
let fields = match &ds.fields {
|
||||
syn::Fields::Named(fields) => fields,
|
||||
syn::Fields::Unnamed(_) => {
|
||||
errors.err(
|
||||
&ds.struct_token,
|
||||
"`#![derive(FromArgs)]` is not currently supported on tuple structs",
|
||||
);
|
||||
return TokenStream::new();
|
||||
}
|
||||
syn::Fields::Unit => {
|
||||
errors.err(
|
||||
&ds.struct_token,
|
||||
"#![derive(FromArgs)]` cannot be applied to unit structs",
|
||||
);
|
||||
return TokenStream::new();
|
||||
}
|
||||
};
|
||||
|
||||
let fields: Vec<_> = fields
|
||||
.named
|
||||
.iter()
|
||||
.filter_map(|field| {
|
||||
let attrs = FieldAttrs::parse(errors, field);
|
||||
StructField::new(errors, field, attrs)
|
||||
})
|
||||
.collect();
|
||||
|
||||
ensure_unique_names(errors, &fields);
|
||||
ensure_only_trailing_positionals_are_optional(errors, &fields);
|
||||
|
||||
let impl_span = Span::call_site();
|
||||
|
||||
let from_args_method = impl_from_args_struct_from_args(errors, type_attrs, &fields);
|
||||
|
||||
let top_or_sub_cmd_impl = top_or_sub_cmd_impl(errors, name, type_attrs, generic_args);
|
||||
|
||||
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
|
||||
let trait_impl = quote_spanned! { impl_span =>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics argh::FromArgs for #name #ty_generics #where_clause {
|
||||
#from_args_method
|
||||
}
|
||||
|
||||
#top_or_sub_cmd_impl
|
||||
};
|
||||
|
||||
trait_impl
|
||||
}
|
||||
|
||||
fn impl_from_args_struct_from_args<'a>(
|
||||
errors: &Errors,
|
||||
type_attrs: &TypeAttrs,
|
||||
fields: &'a [StructField<'a>],
|
||||
) -> TokenStream {
|
||||
let init_fields = declare_local_storage_for_from_args_fields(fields);
|
||||
let unwrap_fields = unwrap_from_args_fields(fields);
|
||||
let positional_fields: Vec<&StructField<'_>> = fields
|
||||
.iter()
|
||||
.filter(|field| field.kind == FieldKind::Positional)
|
||||
.collect();
|
||||
let positional_field_idents = positional_fields.iter().map(|field| &field.field.ident);
|
||||
let positional_field_names = positional_fields.iter().map(|field| field.name.to_string());
|
||||
let last_positional_is_repeating = positional_fields
|
||||
.last()
|
||||
.map(|field| field.optionality == Optionality::Repeating)
|
||||
.unwrap_or(false);
|
||||
let last_positional_is_greedy = positional_fields
|
||||
.last()
|
||||
.map(|field| field.kind == FieldKind::Positional && field.attrs.greedy.is_some())
|
||||
.unwrap_or(false);
|
||||
|
||||
let flag_output_table = fields.iter().filter_map(|field| {
|
||||
let field_name = &field.field.ident;
|
||||
match field.kind {
|
||||
FieldKind::Option => Some(quote! { argh::ParseStructOption::Value(&mut #field_name) }),
|
||||
FieldKind::Switch => Some(quote! { argh::ParseStructOption::Flag(&mut #field_name) }),
|
||||
FieldKind::SubCommand | FieldKind::Positional => None,
|
||||
}
|
||||
});
|
||||
|
||||
let flag_str_to_output_table_map = flag_str_to_output_table_map_entries(fields);
|
||||
|
||||
let mut subcommands_iter = fields
|
||||
.iter()
|
||||
.filter(|field| field.kind == FieldKind::SubCommand)
|
||||
.fuse();
|
||||
|
||||
let subcommand: Option<&StructField<'_>> = subcommands_iter.next();
|
||||
for dup_subcommand in subcommands_iter {
|
||||
errors.duplicate_attrs(
|
||||
"subcommand",
|
||||
subcommand.unwrap().field,
|
||||
dup_subcommand.field,
|
||||
);
|
||||
}
|
||||
|
||||
let impl_span = Span::call_site();
|
||||
|
||||
let missing_requirements_ident = syn::Ident::new("__missing_requirements", impl_span);
|
||||
|
||||
let append_missing_requirements =
|
||||
append_missing_requirements(&missing_requirements_ident, fields);
|
||||
|
||||
let parse_subcommands = if let Some(subcommand) = subcommand {
|
||||
let name = subcommand.name;
|
||||
let ty = subcommand.ty_without_wrapper;
|
||||
quote_spanned! { impl_span =>
|
||||
Some(argh::ParseStructSubCommand {
|
||||
subcommands: <#ty as argh::SubCommands>::COMMANDS,
|
||||
dynamic_subcommands: &<#ty as argh::SubCommands>::dynamic_commands(),
|
||||
parse_func: &mut |__command, __remaining_args| {
|
||||
#name = Some(<#ty as argh::FromArgs>::from_args(__command, __remaining_args)?);
|
||||
Ok(())
|
||||
},
|
||||
})
|
||||
}
|
||||
} else {
|
||||
quote_spanned! { impl_span => None }
|
||||
};
|
||||
|
||||
let help_triggers = get_help_triggers(type_attrs);
|
||||
|
||||
let method_impl = quote_spanned! { impl_span =>
|
||||
fn from_args(__cmd_name: &[&str], __args: &[&str])
|
||||
-> std::result::Result<Self, argh::EarlyExit>
|
||||
{
|
||||
#![allow(clippy::unwrap_in_result)]
|
||||
|
||||
#( #init_fields )*
|
||||
|
||||
argh::parse_struct_args(
|
||||
__cmd_name,
|
||||
__args,
|
||||
argh::ParseStructOptions {
|
||||
arg_to_slot: &[ #( #flag_str_to_output_table_map ,)* ],
|
||||
slots: &mut [ #( #flag_output_table, )* ],
|
||||
help_triggers: &[ #( #help_triggers ),* ],
|
||||
},
|
||||
argh::ParseStructPositionals {
|
||||
positionals: &mut [
|
||||
#(
|
||||
argh::ParseStructPositional {
|
||||
name: #positional_field_names,
|
||||
slot: &mut #positional_field_idents as &mut dyn argh::ParseValueSlot,
|
||||
},
|
||||
)*
|
||||
],
|
||||
last_is_repeating: #last_positional_is_repeating,
|
||||
last_is_greedy: #last_positional_is_greedy,
|
||||
},
|
||||
#parse_subcommands,
|
||||
)?;
|
||||
|
||||
let mut #missing_requirements_ident = argh::MissingRequirements::default();
|
||||
#(
|
||||
#append_missing_requirements
|
||||
)*
|
||||
#missing_requirements_ident.err_on_any()?;
|
||||
|
||||
Ok(Self {
|
||||
#( #unwrap_fields, )*
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
method_impl
|
||||
}
|
||||
|
||||
/// get help triggers vector from type_attrs.help_triggers as a [`Vec<String>`]
|
||||
///
|
||||
/// Defaults to vec!["-h", "--help"] if type_attrs.help_triggers is None
|
||||
fn get_help_triggers(type_attrs: &TypeAttrs) -> Vec<String> {
|
||||
if type_attrs.is_subcommand.is_some() {
|
||||
// Subcommands should never have any help triggers
|
||||
Vec::new()
|
||||
} else {
|
||||
type_attrs.help_triggers.as_ref().map_or_else(
|
||||
|| vec!["-h".to_string(), "--help".to_string()],
|
||||
|s| {
|
||||
s.iter()
|
||||
.filter_map(|s| {
|
||||
let trigger = s.value();
|
||||
let trigger_trimmed = trigger.trim().to_owned();
|
||||
if trigger_trimmed.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(trigger_trimmed)
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensures that only trailing positional args are non-required.
|
||||
fn ensure_only_trailing_positionals_are_optional(errors: &Errors, fields: &[StructField<'_>]) {
|
||||
let mut first_non_required_span = None;
|
||||
for field in fields {
|
||||
if field.kind == FieldKind::Positional {
|
||||
if let Some(first) = first_non_required_span
|
||||
&& field.optionality.is_required()
|
||||
{
|
||||
errors.err_span(
|
||||
first,
|
||||
"Only trailing positional arguments may be `Option`, `Vec`, or defaulted.",
|
||||
);
|
||||
errors.err(
|
||||
&field.field,
|
||||
"Later non-optional positional argument declared here.",
|
||||
);
|
||||
return;
|
||||
}
|
||||
if !field.optionality.is_required() {
|
||||
first_non_required_span = Some(field.field.span());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensures that only one short or long name is used.
|
||||
fn ensure_unique_names(errors: &Errors, fields: &[StructField<'_>]) {
|
||||
let mut seen_short_names = HashMap::new();
|
||||
let mut seen_long_names = HashMap::new();
|
||||
|
||||
for field in fields {
|
||||
if let Some(short_name) = &field.attrs.short {
|
||||
let short_name = short_name.value();
|
||||
if let Some(first_use_field) = seen_short_names.get(&short_name) {
|
||||
errors.err_span_tokens(
|
||||
first_use_field,
|
||||
&format!(
|
||||
"The short name of \"-{}\" was already used here.",
|
||||
short_name
|
||||
),
|
||||
);
|
||||
errors.err_span_tokens(field.field, "Later usage here.");
|
||||
}
|
||||
|
||||
seen_short_names.insert(short_name, &field.field);
|
||||
}
|
||||
|
||||
if let Some(long_name) = &field.long_name {
|
||||
if let Some(first_use_field) = seen_long_names.get(&long_name) {
|
||||
errors.err_span_tokens(
|
||||
*first_use_field,
|
||||
&format!("The long name of \"{}\" was already used here.", long_name),
|
||||
);
|
||||
errors.err_span_tokens(field.field, "Later usage here.");
|
||||
}
|
||||
|
||||
seen_long_names.insert(long_name, field.field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Implement `argh::TopLevelCommand` or `argh::SubCommand` as appropriate.
|
||||
fn top_or_sub_cmd_impl(
|
||||
errors: &Errors,
|
||||
name: &syn::Ident,
|
||||
type_attrs: &TypeAttrs,
|
||||
generic_args: &syn::Generics,
|
||||
) -> TokenStream {
|
||||
let description = String::new();
|
||||
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
|
||||
if type_attrs.is_subcommand.is_none() {
|
||||
// Not a subcommand
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #impl_generics argh::TopLevelCommand for #name #ty_generics #where_clause {}
|
||||
}
|
||||
} else {
|
||||
let empty_str = syn::LitStr::new("", Span::call_site());
|
||||
let subcommand_name = type_attrs.name.as_ref().unwrap_or_else(|| {
|
||||
errors.err(
|
||||
name,
|
||||
"`#[argh(name = \"...\")]` attribute is required for subcommands",
|
||||
);
|
||||
&empty_str
|
||||
});
|
||||
quote! {
|
||||
#[automatically_derived]
|
||||
impl #impl_generics argh::SubCommand for #name #ty_generics #where_clause {
|
||||
const COMMAND: &'static argh::CommandInfo = &argh::CommandInfo {
|
||||
name: #subcommand_name,
|
||||
description: #description,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Declares local slots to store each field in during parsing.
|
||||
///
|
||||
/// Most fields are stored in `Option<FieldType>` locals.
|
||||
/// `argh(option)` fields are stored in a `ParseValueSlotTy` along with a
|
||||
/// function that knows how to decode the appropriate value.
|
||||
fn declare_local_storage_for_from_args_fields<'a>(
|
||||
fields: &'a [StructField<'a>],
|
||||
) -> impl Iterator<Item = TokenStream> + 'a {
|
||||
fields.iter().map(|field| {
|
||||
let field_name = &field.field.ident;
|
||||
let field_type = &field.ty_without_wrapper;
|
||||
|
||||
// Wrap field types in `Option` if they aren't already `Option` or `Vec`-wrapped.
|
||||
let field_slot_type = match field.optionality {
|
||||
Optionality::Optional | Optionality::Repeating => (&field.field.ty).into_token_stream(),
|
||||
Optionality::None | Optionality::Defaulted(_) => {
|
||||
quote! { std::option::Option<#field_type> }
|
||||
}
|
||||
Optionality::DefaultedRepeating(_) => {
|
||||
quote! { std::option::Option<std::vec::Vec<#field_type>> }
|
||||
}
|
||||
};
|
||||
|
||||
match field.kind {
|
||||
FieldKind::Option | FieldKind::Positional => {
|
||||
let from_str_fn = match &field.attrs.from_str_fn {
|
||||
Some(from_str_fn) => from_str_fn.into_token_stream(),
|
||||
None => {
|
||||
quote! {
|
||||
<#field_type as argh::FromArgValue>::from_arg_value
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
quote! {
|
||||
let mut #field_name: argh::ParseValueSlotTy<#field_slot_type, #field_type>
|
||||
= argh::ParseValueSlotTy {
|
||||
slot: std::default::Default::default(),
|
||||
parse_func: |_, value| { #from_str_fn(value) },
|
||||
};
|
||||
}
|
||||
}
|
||||
FieldKind::SubCommand => {
|
||||
quote! { let mut #field_name: #field_slot_type = None; }
|
||||
}
|
||||
FieldKind::Switch => {
|
||||
quote! { let mut #field_name: #field_slot_type = argh::Flag::default(); }
|
||||
}
|
||||
}
|
||||
})
|
||||
}
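
As a sketch of the `from_str_fn` hook wired into `parse_func` above, a hypothetical option field with a custom decoder might look like this (the function name is made up; the signature follows argh's `fn(&str) -> Result<T, String>` convention):

use argh::FromArgs;

// hypothetical custom decoder: accepts values like "0x1000"
fn parse_hex(value: &str) -> Result<u32, String> {
    u32::from_str_radix(value.trim_start_matches("0x"), 16).map_err(|e| e.to_string())
}

/// poke a register
#[derive(FromArgs)]
struct Poke {
    /// register address, e.g. 0x1000
    #[argh(option, from_str_fn(parse_hex))]
    addr: u32,
}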
|
||||
|
||||
/// Unwrap non-optional fields and take options out of their tuple slots.
|
||||
fn unwrap_from_args_fields<'a>(
|
||||
fields: &'a [StructField<'a>],
|
||||
) -> impl Iterator<Item = TokenStream> + 'a {
|
||||
fields.iter().map(|field| {
|
||||
let field_name = field.name;
|
||||
match field.kind {
|
||||
FieldKind::Option | FieldKind::Positional => match &field.optionality {
|
||||
Optionality::None => quote! {
|
||||
#field_name: #field_name.slot.unwrap()
|
||||
},
|
||||
Optionality::Optional | Optionality::Repeating => {
|
||||
quote! { #field_name: #field_name.slot }
|
||||
}
|
||||
Optionality::Defaulted(tokens) | Optionality::DefaultedRepeating(tokens) => {
|
||||
quote! {
|
||||
#field_name: #field_name.slot.unwrap_or_else(|| #tokens)
|
||||
}
|
||||
}
|
||||
},
|
||||
FieldKind::Switch => field_name.into_token_stream(),
|
||||
FieldKind::SubCommand => match field.optionality {
|
||||
Optionality::None => quote! { #field_name: #field_name.unwrap() },
|
||||
Optionality::Optional | Optionality::Repeating => field_name.into_token_stream(),
|
||||
Optionality::Defaulted(_) | Optionality::DefaultedRepeating(_) => unreachable!(),
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Entries of tokens like `("--some-flag-key", 5)` that map from a flag key string
|
||||
/// to an index in the output table.
|
||||
fn flag_str_to_output_table_map_entries<'a>(fields: &'a [StructField<'a>]) -> Vec<TokenStream> {
|
||||
let mut flag_str_to_output_table_map = vec![];
|
||||
|
||||
for (i, field) in fields.iter().enumerate() {
|
||||
if let Some(short) = &field.attrs.short {
|
||||
let short = format!("-{}", short.value());
|
||||
flag_str_to_output_table_map.push(quote! { (#short, #i) });
|
||||
}
|
||||
if let Some(long) = &field.long_name {
|
||||
flag_str_to_output_table_map.push(quote! { (#long, #i) });
|
||||
}
|
||||
}
|
||||
flag_str_to_output_table_map
|
||||
}
|
||||
|
||||
/// For each non-optional field, add an entry to the `argh::MissingRequirements`.
|
||||
fn append_missing_requirements<'a>(
|
||||
// missing_requirements_ident
|
||||
mri: &syn::Ident,
|
||||
fields: &'a [StructField<'a>],
|
||||
) -> impl Iterator<Item = TokenStream> + 'a {
|
||||
let mri = mri.clone();
|
||||
fields
|
||||
.iter()
|
||||
.filter(|f| f.optionality.is_required())
|
||||
.map(move |field| {
|
||||
let field_name = field.name;
|
||||
match field.kind {
|
||||
FieldKind::Switch => unreachable!("switches are always optional"),
|
||||
FieldKind::Positional => {
|
||||
let name = field.positional_arg_name();
|
||||
quote! {
|
||||
if #field_name.slot.is_none() {
|
||||
#mri.missing_positional_arg(#name)
|
||||
}
|
||||
}
|
||||
}
|
||||
FieldKind::Option => {
|
||||
let name = field.option_arg_name();
|
||||
quote! {
|
||||
if #field_name.slot.is_none() {
|
||||
#mri.missing_option(#name)
|
||||
}
|
||||
}
|
||||
}
|
||||
FieldKind::SubCommand => {
|
||||
let ty = field.ty_without_wrapper;
|
||||
quote! {
|
||||
if #field_name.is_none() {
|
||||
#mri.missing_subcommands(
|
||||
<#ty as argh::SubCommands>::COMMANDS
|
||||
.iter()
|
||||
.cloned()
|
||||
.chain(
|
||||
<#ty as argh::SubCommands>::dynamic_commands()
|
||||
.iter()
|
||||
.copied()
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Require that a type can be a `switch`.
|
||||
/// Generates an error for all types except `bool`, `Option<bool>`, and integer types.
|
||||
fn ty_expect_switch(errors: &Errors, ty: &syn::Type) -> bool {
|
||||
fn ty_can_be_switch(ty: &syn::Type) -> bool {
|
||||
if let syn::Type::Path(path) = ty {
|
||||
if path.qself.is_some() {
|
||||
return false;
|
||||
}
|
||||
if path.path.segments.len() != 1 {
|
||||
return false;
|
||||
}
|
||||
let ident = &path.path.segments[0].ident;
|
||||
// `Option<bool>` can be used as a `switch`.
|
||||
if ident == "Option"
|
||||
&& let PathArguments::AngleBracketed(args) = &path.path.segments[0].arguments
|
||||
&& let GenericArgument::Type(Type::Path(p)) = &args.args[0]
|
||||
&& p.path.segments[0].ident == "bool"
|
||||
{
|
||||
return true;
|
||||
}
|
||||
[
|
||||
"bool", "u8", "u16", "u32", "u64", "u128", "i8", "i16", "i32", "i64", "i128",
|
||||
]
|
||||
.iter()
|
||||
.any(|path| ident == path)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
let res = ty_can_be_switch(ty);
|
||||
if !res {
|
||||
errors.err(
|
||||
ty,
|
||||
"switches must be of type `bool`, `Option<bool>`, or integer type",
|
||||
);
|
||||
}
|
||||
res
|
||||
}
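
A sketch of the three switch shapes accepted by this check, with hypothetical field names: `bool` toggles, `Option<bool>` distinguishes "never passed" from "passed", and integer switches are commonly used as occurrence counters.

use argh::FromArgs;

/// toy command showing switch field types
#[derive(FromArgs)]
struct Flags {
    /// plain on/off switch
    #[argh(switch, short = 'f')]
    force: bool,

    /// tri-state switch: `None` if never passed
    #[argh(switch)]
    color: Option<bool>,

    /// counting switch: repeat `-v` to increase verbosity
    #[argh(switch, short = 'v')]
    verbose: u8,
}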
|
||||
|
||||
/// Returns `Some(T)` if a type is `wrapper_name<T>` for any `wrapper_name` in `wrapper_names`.
|
||||
fn ty_inner<'a>(wrapper_names: &[&str], ty: &'a syn::Type) -> Option<&'a syn::Type> {
|
||||
if let syn::Type::Path(path) = ty {
|
||||
if path.qself.is_some() {
|
||||
return None;
|
||||
}
|
||||
// Since we only check the last path segment, it isn't necessarily the case that
|
||||
// we're referring to `std::vec::Vec` or `std::option::Option`, but there isn't
|
||||
// a foolproof way to check these since name resolution happens after macro expansion,
|
||||
// so this is likely "good enough" (so long as people don't have their own types called
|
||||
// `Option` or `Vec` that take one generic parameter they're looking to parse).
|
||||
let last_segment = path.path.segments.last()?;
|
||||
if !wrapper_names.iter().any(|name| last_segment.ident == *name) {
|
||||
return None;
|
||||
}
|
||||
if let syn::PathArguments::AngleBracketed(gen_args) = &last_segment.arguments {
|
||||
let generic_arg = gen_args.args.first()?;
|
||||
if let syn::GenericArgument::Type(ty) = &generic_arg {
|
||||
return Some(ty);
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Implements `FromArgs` and `SubCommands` for a `#![derive(FromArgs)]` enum.
|
||||
fn impl_from_args_enum(
|
||||
errors: &Errors,
|
||||
name: &syn::Ident,
|
||||
type_attrs: &TypeAttrs,
|
||||
generic_args: &syn::Generics,
|
||||
de: &syn::DataEnum,
|
||||
) -> TokenStream {
|
||||
parse_attrs::check_enum_type_attrs(errors, type_attrs, &de.enum_token.span);
|
||||
|
||||
// An enum variant like `<name>(<ty>)`
|
||||
struct SubCommandVariant<'a> {
|
||||
name: &'a syn::Ident,
|
||||
ty: &'a syn::Type,
|
||||
}
|
||||
|
||||
let mut dynamic_type_and_variant = None;
|
||||
|
||||
let variants: Vec<SubCommandVariant<'_>> = de
|
||||
.variants
|
||||
.iter()
|
||||
.filter_map(|variant| {
|
||||
let name = &variant.ident;
|
||||
let ty = enum_only_single_field_unnamed_variants(errors, &variant.fields)?;
|
||||
if parse_attrs::VariantAttrs::parse(errors, variant)
|
||||
.is_dynamic
|
||||
.is_some()
|
||||
{
|
||||
if dynamic_type_and_variant.is_some() {
|
||||
errors.err(variant, "Only one variant can have the `dynamic` attribute");
|
||||
}
|
||||
dynamic_type_and_variant = Some((ty, name));
|
||||
None
|
||||
} else {
|
||||
Some(SubCommandVariant { name, ty })
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let name_repeating = std::iter::repeat(name.clone());
|
||||
let variant_ty = variants.iter().map(|x| x.ty).collect::<Vec<_>>();
|
||||
let variant_names = variants.iter().map(|x| x.name).collect::<Vec<_>>();
|
||||
let dynamic_from_args =
|
||||
dynamic_type_and_variant
|
||||
.as_ref()
|
||||
.map(|(dynamic_type, dynamic_variant)| {
|
||||
quote! {
|
||||
if let Some(result) = <#dynamic_type as argh::DynamicSubCommand>::try_from_args(
|
||||
command_name, args) {
|
||||
return result.map(#name::#dynamic_variant);
|
||||
}
|
||||
}
|
||||
});
|
||||
let dynamic_commands = dynamic_type_and_variant.as_ref().map(|(dynamic_type, _)| {
|
||||
quote! {
|
||||
fn dynamic_commands() -> &'static [&'static argh::CommandInfo] {
|
||||
<#dynamic_type as argh::DynamicSubCommand>::commands()
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
|
||||
quote! {
|
||||
impl #impl_generics argh::FromArgs for #name #ty_generics #where_clause {
|
||||
fn from_args(command_name: &[&str], args: &[&str])
|
||||
-> std::result::Result<Self, argh::EarlyExit>
|
||||
{
|
||||
let subcommand_name = if let Some(subcommand_name) = command_name.last() {
|
||||
*subcommand_name
|
||||
} else {
|
||||
return Err(argh::EarlyExit::from("no subcommand name".to_owned()));
|
||||
};
|
||||
|
||||
#(
|
||||
if subcommand_name == <#variant_ty as argh::SubCommand>::COMMAND.name {
|
||||
return Ok(#name_repeating::#variant_names(
|
||||
<#variant_ty as argh::FromArgs>::from_args(command_name, args)?
|
||||
));
|
||||
}
|
||||
)*
|
||||
|
||||
#dynamic_from_args
|
||||
|
||||
Err(argh::EarlyExit::from("no subcommand matched".to_owned()))
|
||||
}
|
||||
}
|
||||
|
||||
impl #impl_generics argh::SubCommands for #name #ty_generics #where_clause {
|
||||
const COMMANDS: &'static [&'static argh::CommandInfo] = &[#(
|
||||
<#variant_ty as argh::SubCommand>::COMMAND,
|
||||
)*];
|
||||
|
||||
#dynamic_commands
|
||||
}
|
||||
}
|
||||
}
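
The enum shape this function targets is one unnamed-field variant per subcommand, each wrapping its own `#[argh(subcommand)]` struct. A hypothetical sketch:

use argh::FromArgs;

/// top-level tool
#[derive(FromArgs)]
struct Cli {
    /// which action to run
    #[argh(subcommand)]
    cmd: Command,
}

#[derive(FromArgs)]
#[argh(subcommand)]
enum Command {
    Add(AddCmd),
    Remove(RemoveCmd),
}

/// add an entry
#[derive(FromArgs)]
#[argh(subcommand, name = "add")]
struct AddCmd {}

/// remove an entry
#[derive(FromArgs)]
#[argh(subcommand, name = "remove")]
struct RemoveCmd {}

A variant marked `#[argh(dynamic)]` would instead be routed through `try_from_args`, per the `dynamic_from_args` branch generated above.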
|
||||
|
||||
/// Returns `Some(Bar)` if the field is a single-field unnamed variant like `Foo(Bar)`.
|
||||
/// Otherwise, generates an error.
|
||||
fn enum_only_single_field_unnamed_variants<'a>(
|
||||
errors: &Errors,
|
||||
variant_fields: &'a syn::Fields,
|
||||
) -> Option<&'a syn::Type> {
|
||||
macro_rules! with_enum_suggestion {
|
||||
($help_text:literal) => {
|
||||
concat!(
|
||||
$help_text,
|
||||
"\nInstead, use a variant with a single unnamed field for each subcommand:\n",
|
||||
" enum MyCommandEnum {\n",
|
||||
" SubCommandOne(SubCommandOne),\n",
|
||||
" SubCommandTwo(SubCommandTwo),\n",
|
||||
" }",
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
match variant_fields {
|
||||
syn::Fields::Named(fields) => {
|
||||
errors.err(
|
||||
fields,
|
||||
with_enum_suggestion!(
|
||||
"`#![derive(FromArgs)]` `enum`s do not support variants with named fields."
|
||||
),
|
||||
);
|
||||
None
|
||||
}
|
||||
syn::Fields::Unit => {
|
||||
errors.err(
|
||||
variant_fields,
|
||||
with_enum_suggestion!(
|
||||
"`#![derive(FromArgs)]` does not support `enum`s with no variants."
|
||||
),
|
||||
);
|
||||
None
|
||||
}
|
||||
syn::Fields::Unnamed(fields) => {
|
||||
if fields.unnamed.len() != 1 {
|
||||
errors.err(
|
||||
fields,
|
||||
with_enum_suggestion!(
|
||||
"`#![derive(FromArgs)]` `enum` variants must only contain one field."
|
||||
),
|
||||
);
|
||||
None
|
||||
} else {
|
||||
// `unwrap` is okay because of the length check above.
|
||||
let first_field = fields.unnamed.first().unwrap();
|
||||
Some(&first_field.ty)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
688
native/src/base/derive/argh/parse_attrs.rs
Normal file
@@ -0,0 +1,688 @@
|
||||
// Copyright (c) 2020 Google LLC All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
use syn::parse::Parser;
|
||||
use syn::punctuated::Punctuated;
|
||||
|
||||
use super::errors::Errors;
|
||||
use proc_macro2::Span;
|
||||
use std::collections::hash_map::{Entry, HashMap};
|
||||
|
||||
/// Attributes applied to a field of a `#![derive(FromArgs)]` struct.
|
||||
#[derive(Default)]
|
||||
pub struct FieldAttrs {
|
||||
pub default: Option<syn::LitStr>,
|
||||
pub description: Option<Description>,
|
||||
pub from_str_fn: Option<syn::ExprPath>,
|
||||
pub field_type: Option<FieldType>,
|
||||
pub long: Option<Option<syn::LitStr>>,
|
||||
pub short: Option<syn::LitChar>,
|
||||
pub arg_name: Option<syn::LitStr>,
|
||||
pub greedy: Option<syn::Path>,
|
||||
pub hidden_help: bool,
|
||||
}
|
||||
|
||||
/// The purpose of a particular field on a `#![derive(FromArgs)]` struct.
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
pub enum FieldKind {
|
||||
/// Switches are booleans that are set to "true" by passing the flag.
|
||||
Switch,
|
||||
/// Options are `--key value`. They may be optional (using `Option`),
|
||||
/// or repeating (using `Vec`), or required (neither `Option` nor `Vec`)
|
||||
Option,
|
||||
/// Subcommand fields (of which there can be at most one) refer to enums
|
||||
/// containing one of several potential subcommands. They may be optional
|
||||
/// (using `Option`) or required (no `Option`).
|
||||
SubCommand,
|
||||
/// Positional arguments are parsed literally if the input
|
||||
/// does not begin with `-` or `--` and is not a subcommand.
|
||||
/// They are parsed in declaration order, and only the last positional
|
||||
/// argument in a type may be an `Option`, `Vec`, or have a default value.
|
||||
Positional,
|
||||
}
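
As a sketch of the `Option` kind described above, a hypothetical struct mapping the three optionalities onto field types:

use argh::FromArgs;

/// fetch a resource
#[derive(FromArgs)]
struct Fetch {
    /// required: `--url <value>` must be given
    #[argh(option)]
    url: String,

    /// optional: may be omitted
    #[argh(option)]
    timeout: Option<u32>,

    /// repeating: each `--header <value>` appends to the list
    #[argh(option)]
    header: Vec<String>,
}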
|
||||
|
||||
/// The type of a field on a `#![derive(FromArgs)]` struct.
|
||||
///
|
||||
/// This is a simple wrapper around `FieldKind` which includes the `syn::Ident`
|
||||
/// of the attribute containing the field kind.
|
||||
pub struct FieldType {
|
||||
pub kind: FieldKind,
|
||||
pub ident: syn::Ident,
|
||||
}
|
||||
|
||||
/// A description of a `#![derive(FromArgs)]` struct.
|
||||
///
|
||||
/// Defaults to the docstring if one is present, or `#[argh(description = "...")]`
|
||||
/// if one is provided.
|
||||
pub struct Description {
|
||||
/// Whether the description was an explicit annotation or whether it was a doc string.
|
||||
pub explicit: bool,
|
||||
pub content: syn::LitStr,
|
||||
}
|
||||
|
||||
impl FieldAttrs {
|
||||
pub fn parse(errors: &Errors, field: &syn::Field) -> Self {
|
||||
let mut this = Self::default();
|
||||
|
||||
for attr in &field.attrs {
|
||||
if is_doc_attr(attr) {
|
||||
parse_attr_doc(errors, attr, &mut this.description);
|
||||
continue;
|
||||
}
|
||||
|
||||
let ml = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
|
||||
ml
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for meta in ml {
|
||||
let name = meta.path();
|
||||
if name.is_ident("arg_name") {
|
||||
if let Some(m) = errors.expect_meta_name_value(&meta) {
|
||||
this.parse_attr_arg_name(errors, m);
|
||||
}
|
||||
} else if name.is_ident("default") {
|
||||
if let Some(m) = errors.expect_meta_name_value(&meta) {
|
||||
this.parse_attr_default(errors, m);
|
||||
}
|
||||
} else if name.is_ident("description") {
|
||||
if let Some(m) = errors.expect_meta_name_value(&meta) {
|
||||
parse_attr_description(errors, m, &mut this.description);
|
||||
}
|
||||
} else if name.is_ident("from_str_fn") {
|
||||
if let Some(m) = errors.expect_meta_list(&meta) {
|
||||
this.parse_attr_from_str_fn(errors, m);
|
||||
}
|
||||
} else if name.is_ident("long") {
|
||||
if let Some(m) = errors.expect_meta_name_value(&meta) {
|
||||
this.parse_attr_long(errors, m);
|
||||
}
|
||||
} else if name.is_ident("option") {
|
||||
parse_attr_field_type(errors, &meta, FieldKind::Option, &mut this.field_type);
|
||||
} else if name.is_ident("short") {
|
||||
if let Some(m) = errors.expect_meta_name_value(&meta) {
|
||||
this.parse_attr_short(errors, m);
|
||||
}
|
||||
} else if name.is_ident("subcommand") {
|
||||
parse_attr_field_type(
|
||||
errors,
|
||||
&meta,
|
||||
FieldKind::SubCommand,
|
||||
&mut this.field_type,
|
||||
);
|
||||
} else if name.is_ident("switch") {
|
||||
parse_attr_field_type(errors, &meta, FieldKind::Switch, &mut this.field_type);
|
||||
} else if name.is_ident("positional") {
|
||||
parse_attr_field_type(
|
||||
errors,
|
||||
&meta,
|
||||
FieldKind::Positional,
|
||||
&mut this.field_type,
|
||||
);
|
||||
} else if name.is_ident("greedy") {
|
||||
this.greedy = Some(name.clone());
|
||||
} else if name.is_ident("hidden_help") {
|
||||
this.hidden_help = true;
|
||||
} else {
|
||||
errors.err(
|
||||
&meta,
|
||||
concat!(
|
||||
"Invalid field-level `argh` attribute\n",
|
||||
"Expected one of: `arg_name`, `default`, `description`, `from_str_fn`, `greedy`, ",
|
||||
"`long`, `option`, `short`, `subcommand`, `switch`, `hidden_help`",
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let (Some(default), Some(field_type)) = (&this.default, &this.field_type) {
|
||||
match field_type.kind {
|
||||
FieldKind::Option | FieldKind::Positional => {}
|
||||
FieldKind::SubCommand | FieldKind::Switch => errors.err(
|
||||
default,
|
||||
"`default` may only be specified on `#[argh(option)]` \
|
||||
or `#[argh(positional)]` fields",
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
match (&this.greedy, this.field_type.as_ref().map(|f| f.kind)) {
|
||||
(Some(_), Some(FieldKind::Positional)) => {}
|
||||
(Some(greedy), Some(_)) => errors.err(
|
||||
&greedy,
|
||||
"`greedy` may only be specified on `#[argh(positional)]` \
|
||||
fields",
|
||||
),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if let Some(d) = &this.description {
|
||||
check_option_description(errors, d.content.value().trim(), d.content.span());
|
||||
}
|
||||
|
||||
this
|
||||
}
|
||||
|
||||
fn parse_attr_from_str_fn(&mut self, errors: &Errors, m: &syn::MetaList) {
|
||||
parse_attr_fn_name(errors, m, "from_str_fn", &mut self.from_str_fn)
|
||||
}
|
||||
|
||||
fn parse_attr_default(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
|
||||
parse_attr_single_string(errors, m, "default", &mut self.default);
|
||||
}
|
||||
|
||||
fn parse_attr_arg_name(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
|
||||
parse_attr_single_string(errors, m, "arg_name", &mut self.arg_name);
|
||||
}
|
||||
|
||||
fn parse_attr_long(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
|
||||
if let Some(first) = &self.long {
|
||||
errors.duplicate_attrs("long", first, m);
|
||||
} else if let syn::Expr::Path(syn::ExprPath { path, .. }) = &m.value
|
||||
&& let Some(ident) = path.get_ident()
|
||||
&& ident.to_string().eq_ignore_ascii_case("none")
|
||||
{
|
||||
self.long = Some(None);
|
||||
} else if let Some(lit_str) = errors.expect_lit_str(&m.value) {
|
||||
self.long = Some(Some(lit_str.clone()));
|
||||
}
|
||||
if let Some(Some(long)) = &self.long {
|
||||
let value = long.value();
|
||||
check_long_name(errors, long, &value);
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_attr_short(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
|
||||
if let Some(first) = &self.short {
|
||||
errors.duplicate_attrs("short", first, m);
|
||||
} else if let Some(lit_char) = errors.expect_lit_char(&m.value) {
|
||||
self.short = Some(lit_char.clone());
|
||||
if !lit_char.value().is_ascii() {
|
||||
errors.err(lit_char, "Short names must be ASCII");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn check_long_name(errors: &Errors, spanned: &impl syn::spanned::Spanned, value: &str) {
|
||||
if !value.is_ascii() {
|
||||
errors.err(spanned, "Long names must be ASCII");
|
||||
}
|
||||
if !value
|
||||
.chars()
|
||||
.all(|c| c.is_lowercase() || c == '-' || c.is_ascii_digit())
|
||||
{
|
||||
errors.err(
|
||||
spanned,
|
||||
"Long names may only contain lowercase letters, digits, and dashes",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_attr_fn_name(
|
||||
errors: &Errors,
|
||||
m: &syn::MetaList,
|
||||
attr_name: &str,
|
||||
slot: &mut Option<syn::ExprPath>,
|
||||
) {
|
||||
if let Some(first) = slot {
|
||||
errors.duplicate_attrs(attr_name, first, m);
|
||||
}
|
||||
|
||||
*slot = errors.ok(m.parse_args());
|
||||
}
|
||||
|
||||
fn parse_attr_field_type(
|
||||
errors: &Errors,
|
||||
meta: &syn::Meta,
|
||||
kind: FieldKind,
|
||||
slot: &mut Option<FieldType>,
|
||||
) {
|
||||
if let Some(path) = errors.expect_meta_word(meta) {
|
||||
if let Some(first) = slot {
|
||||
errors.duplicate_attrs("field kind", &first.ident, path);
|
||||
} else if let Some(word) = path.get_ident() {
|
||||
*slot = Some(FieldType {
|
||||
kind,
|
||||
ident: word.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Whether the attribute is one like `#[<name> ...]`
|
||||
fn is_matching_attr(name: &str, attr: &syn::Attribute) -> bool {
|
||||
attr.path().segments.len() == 1 && attr.path().segments[0].ident == name
|
||||
}
|
||||
|
||||
/// Checks for `#[doc ...]`, which is generated by doc comments.
|
||||
fn is_doc_attr(attr: &syn::Attribute) -> bool {
|
||||
is_matching_attr("doc", attr)
|
||||
}
|
||||
|
||||
/// Checks for `#[argh ...]`
|
||||
fn is_argh_attr(attr: &syn::Attribute) -> bool {
|
||||
is_matching_attr("argh", attr)
|
||||
}
|
||||
|
||||
/// Filters out non-`#[argh(...)]` attributes and converts to a sequence of `syn::Meta`.
|
||||
fn argh_attr_to_meta_list(
|
||||
errors: &Errors,
|
||||
attr: &syn::Attribute,
|
||||
) -> Option<impl IntoIterator<Item = syn::Meta>> {
|
||||
if !is_argh_attr(attr) {
|
||||
return None;
|
||||
}
|
||||
let ml = errors.expect_meta_list(&attr.meta)?;
|
||||
errors.ok(ml.parse_args_with(
|
||||
syn::punctuated::Punctuated::<syn::Meta, syn::Token![,]>::parse_terminated,
|
||||
))
|
||||
}
|
||||
|
||||
/// Represents a `#[derive(FromArgs)]` type's top-level attributes.
|
||||
#[derive(Default)]
|
||||
pub struct TypeAttrs {
|
||||
pub is_subcommand: Option<syn::Ident>,
|
||||
pub name: Option<syn::LitStr>,
|
||||
pub description: Option<Description>,
|
||||
pub examples: Vec<syn::LitStr>,
|
||||
pub notes: Vec<syn::LitStr>,
|
||||
pub error_codes: Vec<(syn::LitInt, syn::LitStr)>,
|
||||
/// Arguments that trigger printing of the help message
|
||||
pub help_triggers: Option<Vec<syn::LitStr>>,
|
||||
}
|
||||
|
||||
impl TypeAttrs {
|
||||
/// Parse top-level `#[argh(...)]` attributes
|
||||
pub fn parse(errors: &Errors, derive_input: &syn::DeriveInput) -> Self {
|
||||
let mut this = TypeAttrs::default();
|
||||
|
||||
for attr in &derive_input.attrs {
|
||||
if is_doc_attr(attr) {
|
||||
parse_attr_doc(errors, attr, &mut this.description);
|
||||
continue;
|
||||
}
|
||||
|
||||
let ml: Vec<syn::Meta> = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
|
||||
ml.into_iter().collect()
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for meta in ml.iter() {
|
||||
let name = meta.path();
|
||||
if name.is_ident("description") {
|
||||
if let Some(m) = errors.expect_meta_name_value(meta) {
|
||||
parse_attr_description(errors, m, &mut this.description);
|
||||
}
|
||||
} else if name.is_ident("error_code") {
|
||||
if let Some(m) = errors.expect_meta_list(meta) {
|
||||
this.parse_attr_error_code(errors, m);
|
||||
}
|
||||
} else if name.is_ident("example") {
|
||||
if let Some(m) = errors.expect_meta_name_value(meta) {
|
||||
this.parse_attr_example(errors, m);
|
||||
}
|
||||
} else if name.is_ident("name") {
|
||||
if let Some(m) = errors.expect_meta_name_value(meta) {
|
||||
this.parse_attr_name(errors, m);
|
||||
}
|
||||
} else if name.is_ident("note") {
|
||||
if let Some(m) = errors.expect_meta_name_value(meta) {
|
||||
this.parse_attr_note(errors, m);
|
||||
}
|
||||
} else if name.is_ident("subcommand") {
|
||||
if let Some(ident) = errors.expect_meta_word(meta).and_then(|p| p.get_ident()) {
|
||||
this.parse_attr_subcommand(errors, ident);
|
||||
}
|
||||
} else if name.is_ident("help_triggers") {
|
||||
if let Some(m) = errors.expect_meta_list(meta) {
|
||||
Self::parse_help_triggers(m, errors, &mut this);
|
||||
}
|
||||
} else {
|
||||
errors.err(
|
||||
meta,
|
||||
concat!(
|
||||
"Invalid type-level `argh` attribute\n",
|
||||
"Expected one of: `description`, `error_code`, `example`, `name`, ",
|
||||
"`note`, `subcommand`, `help_triggers`",
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if this.is_subcommand.is_some() && this.help_triggers.is_some() {
|
||||
let help_meta = ml
|
||||
.iter()
|
||||
.find(|meta| meta.path().is_ident("help_triggers"))
|
||||
.unwrap();
|
||||
errors.err(help_meta, "Cannot use `help_triggers` on a subcommand");
|
||||
}
|
||||
}
|
||||
|
||||
this.check_error_codes(errors);
|
||||
this
|
||||
}
|
||||
|
||||
/// Checks that error codes are within range for `i32` and that they are
|
||||
/// never duplicated.
|
||||
fn check_error_codes(&self, errors: &Errors) {
|
||||
// map from error code to index
|
||||
let mut map: HashMap<u64, usize> = HashMap::new();
|
||||
for (index, (lit_int, _lit_str)) in self.error_codes.iter().enumerate() {
|
||||
let value = match lit_int.base10_parse::<u64>() {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
errors.push(e);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
if value > (i32::MAX as u64) {
|
||||
errors.err(lit_int, "Error code out of range for `i32`");
|
||||
}
|
||||
match map.entry(value) {
|
||||
Entry::Occupied(previous) => {
|
||||
let previous_index = *previous.get();
|
||||
let (previous_lit_int, _previous_lit_str) = &self.error_codes[previous_index];
|
||||
errors.err(lit_int, &format!("Duplicate error code {}", value));
|
||||
errors.err(
|
||||
previous_lit_int,
|
||||
&format!("Error code {} previously defined here", value),
|
||||
);
|
||||
}
|
||||
Entry::Vacant(slot) => {
|
||||
slot.insert(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
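
A hypothetical use of the `error_code` attribute validated here; each code must fit in an `i32` and appear only once:

use argh::FromArgs;

/// upload local changes
#[derive(FromArgs)]
#[argh(
    error_code(1, "network unreachable"),
    error_code(2, "authentication failed")
)]
struct Upload {}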
|
||||
|
||||
fn parse_attr_error_code(&mut self, errors: &Errors, ml: &syn::MetaList) {
|
||||
errors.ok(ml.parse_args_with(|input: syn::parse::ParseStream| {
|
||||
let err_code = input.parse()?;
|
||||
input.parse::<syn::Token![,]>()?;
|
||||
let err_msg = input.parse()?;
|
||||
if let (Some(err_code), Some(err_msg)) = (
|
||||
errors.expect_lit_int(&err_code),
|
||||
errors.expect_lit_str(&err_msg),
|
||||
) {
|
||||
self.error_codes.push((err_code.clone(), err_msg.clone()));
|
||||
}
|
||||
Ok(())
|
||||
}));
|
||||
}
|
||||
|
||||
fn parse_attr_example(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
|
||||
parse_attr_multi_string(errors, m, &mut self.examples)
|
||||
}
|
||||
|
||||
fn parse_attr_name(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
|
||||
parse_attr_single_string(errors, m, "name", &mut self.name);
|
||||
if let Some(name) = &self.name
|
||||
&& name.value() == "help"
|
||||
{
|
||||
errors.err(name, "Custom `help` commands are not supported.");
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_attr_note(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
|
||||
parse_attr_multi_string(errors, m, &mut self.notes)
|
||||
}
|
||||
|
||||
fn parse_attr_subcommand(&mut self, errors: &Errors, ident: &syn::Ident) {
|
||||
if let Some(first) = &self.is_subcommand {
|
||||
errors.duplicate_attrs("subcommand", first, ident);
|
||||
} else {
|
||||
self.is_subcommand = Some(ident.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the list of arguments that trigger printing of the help message, e.g. `#[argh(help_triggers("-h", "--help", "help"))]`.
|
||||
fn parse_help_triggers(m: &syn::MetaList, errors: &Errors, this: &mut TypeAttrs) {
|
||||
let parser = Punctuated::<syn::Expr, syn::Token![,]>::parse_terminated;
|
||||
match parser.parse(m.tokens.clone().into()) {
|
||||
Ok(args) => {
|
||||
let mut triggers = Vec::new();
|
||||
for arg in args {
|
||||
if let syn::Expr::Lit(syn::ExprLit {
|
||||
lit: syn::Lit::Str(lit_str),
|
||||
..
|
||||
}) = arg
|
||||
{
|
||||
triggers.push(lit_str);
|
||||
}
|
||||
}
|
||||
|
||||
this.help_triggers = Some(triggers);
|
||||
}
|
||||
Err(err) => errors.push(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents an enum variant's attributes.
|
||||
#[derive(Default)]
|
||||
pub struct VariantAttrs {
|
||||
pub is_dynamic: Option<syn::Path>,
|
||||
}
|
||||
|
||||
impl VariantAttrs {
|
||||
/// Parse enum variant `#[argh(...)]` attributes
|
||||
pub fn parse(errors: &Errors, variant: &syn::Variant) -> Self {
|
||||
let mut this = VariantAttrs::default();
|
||||
|
||||
let fields = match &variant.fields {
|
||||
syn::Fields::Named(fields) => Some(&fields.named),
|
||||
syn::Fields::Unnamed(fields) => Some(&fields.unnamed),
|
||||
syn::Fields::Unit => None,
|
||||
};
|
||||
|
||||
for field in fields.into_iter().flatten() {
|
||||
for attr in &field.attrs {
|
||||
if is_argh_attr(attr) {
|
||||
err_unused_enum_attr(errors, attr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for attr in &variant.attrs {
|
||||
let ml = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
|
||||
ml
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for meta in ml {
|
||||
let name = meta.path();
|
||||
if name.is_ident("dynamic") {
|
||||
if let Some(prev) = this.is_dynamic.as_ref() {
|
||||
errors.duplicate_attrs("dynamic", prev, &meta);
|
||||
} else {
|
||||
this.is_dynamic = errors.expect_meta_word(&meta).cloned();
|
||||
}
|
||||
} else {
|
||||
errors.err(
|
||||
&meta,
|
||||
"Invalid variant-level `argh` attribute\n\
|
||||
Variants can only have the #[argh(dynamic)] attribute.",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this
|
||||
}
|
||||
}
|
||||
|
||||
fn check_option_description(errors: &Errors, desc: &str, span: Span) {
|
||||
let chars = &mut desc.trim().chars();
|
||||
match (chars.next(), chars.next()) {
|
||||
(Some(x), _) if x.is_lowercase() => {}
|
||||
// If both the first and second letter are not lowercase,
|
||||
// this is likely an initialism which should be allowed.
|
||||
(Some(x), Some(y)) if !x.is_lowercase() && (y.is_alphanumeric() && !y.is_lowercase()) => {}
|
||||
_ => {
|
||||
errors.err_span(span, "Descriptions must begin with a lowercase letter");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_attr_single_string(
|
||||
errors: &Errors,
|
||||
m: &syn::MetaNameValue,
|
||||
name: &str,
|
||||
slot: &mut Option<syn::LitStr>,
|
||||
) {
|
||||
if let Some(first) = slot {
|
||||
errors.duplicate_attrs(name, first, m);
|
||||
} else if let Some(lit_str) = errors.expect_lit_str(&m.value) {
|
||||
*slot = Some(lit_str.clone());
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_attr_multi_string(errors: &Errors, m: &syn::MetaNameValue, list: &mut Vec<syn::LitStr>) {
|
||||
if let Some(lit_str) = errors.expect_lit_str(&m.value) {
|
||||
list.push(lit_str.clone());
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_attr_doc(errors: &Errors, attr: &syn::Attribute, slot: &mut Option<Description>) {
|
||||
let nv = if let Some(nv) = errors.expect_meta_name_value(&attr.meta) {
|
||||
nv
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
// Don't replace an existing explicit description.
|
||||
if slot.as_ref().map(|d| d.explicit).unwrap_or(false) {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(lit_str) = errors.expect_lit_str(&nv.value) {
|
||||
let lit_str = if let Some(previous) = slot {
|
||||
let previous = &previous.content;
|
||||
let previous_span = previous.span();
|
||||
syn::LitStr::new(
|
||||
&(previous.value() + &unescape_doc(lit_str.value())),
|
||||
previous_span,
|
||||
)
|
||||
} else {
|
||||
syn::LitStr::new(&unescape_doc(lit_str.value()), lit_str.span())
|
||||
};
|
||||
*slot = Some(Description {
|
||||
explicit: false,
|
||||
content: lit_str,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Replaces escape sequences in doc-comments with the characters they represent.
|
||||
///
|
||||
/// Rustdoc understands CommonMark escape sequences consisting of a backslash followed by an ASCII
|
||||
/// punctuation character. Any other backslash is treated as a literal backslash.
|
||||
fn unescape_doc(s: String) -> String {
|
||||
let mut result = String::with_capacity(s.len());
|
||||
|
||||
let mut characters = s.chars().peekable();
|
||||
while let Some(mut character) = characters.next() {
|
||||
if character == '\\'
|
||||
&& let Some(next_character) = characters.peek()
|
||||
&& next_character.is_ascii_punctuation()
|
||||
{
|
||||
character = *next_character;
|
||||
characters.next();
|
||||
}
|
||||
|
||||
// Braces must be escaped as this string will be used as a format string
|
||||
if character == '{' || character == '}' {
|
||||
result.push(character);
|
||||
}
|
||||
|
||||
result.push(character);
|
||||
}
|
||||
|
||||
result
|
||||
}
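
An illustrative test sketch of the behavior described above (it would have to live next to the function, which is private to this crate):

#[test]
fn unescape_doc_sketch() {
    // CommonMark escape: a backslash before punctuation is dropped
    assert_eq!(unescape_doc(r"escaped \* star".to_string()), "escaped * star");
    // a backslash before a non-punctuation character is kept literally
    assert_eq!(unescape_doc(r"a\b".to_string()), r"a\b");
    // braces are doubled so the result is safe to use as a format string
    assert_eq!(unescape_doc("fmt {arg}".to_string()), "fmt {{arg}}");
}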
|
||||
|
||||
fn parse_attr_description(errors: &Errors, m: &syn::MetaNameValue, slot: &mut Option<Description>) {
|
||||
let lit_str = if let Some(lit_str) = errors.expect_lit_str(&m.value) {
|
||||
lit_str
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
// Don't allow multiple explicit (non doc-comment) descriptions
|
||||
if let Some(description) = slot
|
||||
&& description.explicit
|
||||
{
|
||||
errors.duplicate_attrs("description", &description.content, lit_str);
|
||||
}
|
||||
|
||||
*slot = Some(Description {
|
||||
explicit: true,
|
||||
content: lit_str.clone(),
|
||||
});
|
||||
}
|
||||
|
||||
/// Checks that a `#![derive(FromArgs)]` enum has an `#[argh(subcommand)]`
|
||||
/// attribute and that it does not have any other type-level `#[argh(...)]` attributes.
|
||||
pub fn check_enum_type_attrs(errors: &Errors, type_attrs: &TypeAttrs, type_span: &Span) {
|
||||
let TypeAttrs {
|
||||
is_subcommand,
|
||||
name,
|
||||
description,
|
||||
examples,
|
||||
notes,
|
||||
error_codes,
|
||||
help_triggers,
|
||||
} = type_attrs;
|
||||
|
||||
// Ensure that `#[argh(subcommand)]` is present.
|
||||
if is_subcommand.is_none() {
|
||||
errors.err_span(
|
||||
*type_span,
|
||||
concat!(
|
||||
"`#![derive(FromArgs)]` on `enum`s can only be used to enumerate subcommands.\n",
|
||||
"Consider adding `#[argh(subcommand)]` to the `enum` declaration.",
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// Error on all other type-level attributes.
|
||||
if let Some(name) = name {
|
||||
err_unused_enum_attr(errors, name);
|
||||
}
|
||||
if let Some(description) = description
|
||||
&& description.explicit
|
||||
{
|
||||
err_unused_enum_attr(errors, &description.content);
|
||||
}
|
||||
if let Some(example) = examples.first() {
|
||||
err_unused_enum_attr(errors, example);
|
||||
}
|
||||
if let Some(note) = notes.first() {
|
||||
err_unused_enum_attr(errors, note);
|
||||
}
|
||||
if let Some(err_code) = error_codes.first() {
|
||||
err_unused_enum_attr(errors, &err_code.0);
|
||||
}
|
||||
if let Some(triggers) = help_triggers
|
||||
&& let Some(trigger) = triggers.first()
|
||||
{
|
||||
err_unused_enum_attr(errors, trigger);
|
||||
}
|
||||
}
|
||||
|
||||
fn err_unused_enum_attr(errors: &Errors, location: &impl syn::spanned::Spanned) {
|
||||
errors.err(
|
||||
location,
|
||||
concat!(
|
||||
"Unused `argh` attribute on `#![derive(FromArgs)]` enum. ",
|
||||
"Such `enum`s can only be used to dispatch to subcommands, ",
|
||||
"and should only contain the #[argh(subcommand)] attribute.",
|
||||
),
|
||||
);
|
||||
}
|
||||
19
native/src/base/derive/lib.rs
Normal file
@@ -0,0 +1,19 @@
|
||||
#![recursion_limit = "256"]
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
|
||||
mod argh;
|
||||
mod decodable;
|
||||
|
||||
#[proc_macro_derive(Decodable)]
|
||||
pub fn derive_decodable(input: TokenStream) -> TokenStream {
|
||||
decodable::derive_decodable(input)
|
||||
}
|
||||
|
||||
/// Entrypoint for `#[derive(FromArgs)]`.
|
||||
#[proc_macro_derive(FromArgs, attributes(argh))]
|
||||
pub fn argh_derive(input: TokenStream) -> TokenStream {
|
||||
let ast = syn::parse_macro_input!(input as syn::DeriveInput);
|
||||
let token = argh::impl_from_args(&ast);
|
||||
token.into()
|
||||
}
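
A minimal end-to-end sketch of the derive exported here, assuming the standard argh runtime API (`from_args` with the signature generated above, and an `EarlyExit` that carries its help or error text in `output`); the command and field are hypothetical:

use argh::FromArgs;

/// greet someone
#[derive(FromArgs)]
struct Greet {
    /// name to greet
    #[argh(positional)]
    name: String,
}

fn main() {
    // parse from an explicit slice; argh::from_env() does the same with process args
    match Greet::from_args(&["greet"], &["Magisk"]) {
        Ok(g) => println!("hello, {}", g.name),
        Err(early_exit) => print!("{}", early_exit.output),
    }
}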
|
||||
@@ -4,7 +4,10 @@ use crate::{
|
||||
fd_path, fd_set_attr,
|
||||
};
|
||||
use libc::{dirent, mode_t};
|
||||
use nix::{errno::Errno, fcntl::AtFlags, fcntl::OFlag, sys::stat::Mode, unistd::UnlinkatFlags};
|
||||
use nix::errno::Errno;
|
||||
use nix::fcntl::{AtFlags, OFlag};
|
||||
use nix::sys::stat::Mode;
|
||||
use nix::unistd::UnlinkatFlags;
|
||||
use std::fs::File;
|
||||
use std::ops::Deref;
|
||||
use std::os::fd::{AsFd, AsRawFd, BorrowedFd, IntoRawFd, OwnedFd, RawFd};
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include <sys/stat.h>
|
||||
#include <linux/fs.h>
|
||||
#include <functional>
|
||||
#include <string_view>
|
||||
#include <string>
|
||||
|
||||
#include "base-rs.hpp"
|
||||
|
||||
struct mmap_data : public byte_data {
|
||||
static_assert((sizeof(void *) == 8 && BLKGETSIZE64 == 0x80081272) ||
|
||||
(sizeof(void *) == 4 && BLKGETSIZE64 == 0x80041272));
|
||||
ALLOW_MOVE_ONLY(mmap_data)
|
||||
|
||||
mmap_data() = default;
|
||||
explicit mmap_data(const char *name, bool rw = false);
|
||||
mmap_data(int dirfd, const char *name, bool rw = false);
|
||||
mmap_data(int fd, size_t sz, bool rw = false);
|
||||
~mmap_data();
|
||||
};
|
||||
|
||||
extern "C" {
|
||||
|
||||
int mkdirs(const char *path, mode_t mode);
|
||||
ssize_t canonical_path(const char * __restrict__ path, char * __restrict__ buf, size_t bufsiz);
|
||||
bool rm_rf(const char *path);
|
||||
bool frm_rf(int dirfd);
|
||||
bool cp_afc(const char *src, const char *dest);
|
||||
bool mv_path(const char *src, const char *dest);
|
||||
bool link_path(const char *src, const char *dest);
|
||||
bool clone_attr(const char *src, const char *dest);
|
||||
bool fclone_attr(int src, int dest);
|
||||
|
||||
} // extern "C"
|
||||
|
||||
std::string full_read(int fd);
|
||||
std::string full_read(const char *filename);
|
||||
void write_zero(int fd, size_t size);
|
||||
std::string resolve_preinit_dir(const char *base_dir);
|
||||
|
||||
// Functor = function<bool(Utf8CStr, Utf8CStr)>
|
||||
template <typename Functor>
|
||||
void parse_prop_file(const char *file, Functor &&fn) {
|
||||
parse_prop_file_rs(file, [&](rust::Str key, rust::Str val) -> bool {
|
||||
// We perform the null termination here in C++ because it's very difficult to do it
|
||||
// right in Rust due to pointer provenance. Trying to dereference a pointer without
|
||||
// the correct provenance in Rust, even in unsafe code, is undefined behavior.
|
||||
// However on the C++ side, there are fewer restrictions on pointers, so the const_cast here
|
||||
// will not trigger UB in the compiler.
|
||||
*(const_cast<char *>(key.data()) + key.size()) = '\0';
|
||||
*(const_cast<char *>(val.data()) + val.size()) = '\0';
|
||||
return fn(Utf8CStr(key.data(), key.size() + 1), Utf8CStr(val.data(), val.size() + 1));
|
||||
});
|
||||
}
|
||||
|
||||
using sFILE = std::unique_ptr<FILE, decltype(&fclose)>;
|
||||
using sDIR = std::unique_ptr<DIR, decltype(&closedir)>;
|
||||
sDIR make_dir(DIR *dp);
|
||||
sFILE make_file(FILE *fp);
|
||||
|
||||
static inline sDIR open_dir(const char *path) {
|
||||
return make_dir(opendir(path));
|
||||
}
|
||||
|
||||
static inline sDIR xopen_dir(const char *path) {
|
||||
return make_dir(xopendir(path));
|
||||
}
|
||||
|
||||
static inline sDIR xopen_dir(int dirfd) {
|
||||
return make_dir(xfdopendir(dirfd));
|
||||
}
|
||||
|
||||
static inline sFILE open_file(const char *path, const char *mode) {
|
||||
return make_file(fopen(path, mode));
|
||||
}
|
||||
|
||||
static inline sFILE xopen_file(const char *path, const char *mode) {
|
||||
return make_file(xfopen(path, mode));
|
||||
}
|
||||
|
||||
static inline sFILE xopen_file(int fd, const char *mode) {
|
||||
return make_file(xfdopen(fd, mode));
|
||||
}
|
||||
@@ -4,13 +4,10 @@ use crate::{
|
||||
};
|
||||
use bytemuck::{Pod, bytes_of, bytes_of_mut};
|
||||
use libc::{c_uint, makedev, mode_t};
|
||||
use nix::{
|
||||
errno::Errno,
|
||||
fcntl::{AT_FDCWD, OFlag},
|
||||
sys::stat::{FchmodatFlags, Mode},
|
||||
unistd::AccessFlags,
|
||||
unistd::{Gid, Uid},
|
||||
};
|
||||
use nix::errno::Errno;
|
||||
use nix::fcntl::{AT_FDCWD, OFlag};
|
||||
use nix::sys::stat::{FchmodatFlags, Mode};
|
||||
use nix::unistd::{AccessFlags, Gid, Uid};
|
||||
use num_traits::AsPrimitive;
|
||||
use std::cmp::min;
|
||||
use std::ffi::CStr;
|
||||
|
||||
@@ -1,10 +1,353 @@
|
||||
#pragma once
|
||||
|
||||
#include "../xwrap.hpp"
|
||||
#include "../misc.hpp"
|
||||
#include "../base-rs.hpp"
|
||||
#include "../files.hpp"
|
||||
#include "../logging.hpp"
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
#include <dirent.h>
|
||||
#include <fcntl.h>
|
||||
#include <functional>
|
||||
|
||||
using rust::xpipe2;
|
||||
using kv_pairs = std::vector<std::pair<std::string, std::string>>;
|
||||
#include <rust/cxx.h>
|
||||
|
||||
void LOGD(const char *fmt, ...) __printflike(1, 2);
|
||||
void LOGI(const char *fmt, ...) __printflike(1, 2);
|
||||
void LOGW(const char *fmt, ...) __printflike(1, 2);
|
||||
void LOGE(const char *fmt, ...) __printflike(1, 2);
|
||||
#define PLOGE(fmt, args...) LOGE(fmt " failed with %d: %s\n", ##args, errno, std::strerror(errno))
|
||||
|
||||
extern "C" {
|
||||
|
||||
// xwraps
|
||||
|
||||
FILE *xfopen(const char *pathname, const char *mode);
|
||||
FILE *xfdopen(int fd, const char *mode);
|
||||
int xopen(const char *pathname, int flags, mode_t mode = 0);
|
||||
int xopenat(int dirfd, const char *pathname, int flags, mode_t mode = 0);
|
||||
ssize_t xwrite(int fd, const void *buf, size_t count);
|
||||
ssize_t xread(int fd, void *buf, size_t count);
|
||||
ssize_t xxread(int fd, void *buf, size_t count);
|
||||
int xsetns(int fd, int nstype);
|
||||
int xunshare(int flags);
|
||||
DIR *xopendir(const char *name);
|
||||
DIR *xfdopendir(int fd);
|
||||
dirent *xreaddir(DIR *dirp);
|
||||
pid_t xsetsid();
|
||||
int xfstat(int fd, struct stat *buf);
|
||||
int xdup2(int oldfd, int newfd);
|
||||
ssize_t xreadlinkat(
|
||||
int dirfd, const char * __restrict__ pathname, char * __restrict__ buf, size_t bufsiz);
|
||||
int xsymlink(const char *target, const char *linkpath);
|
||||
int xmount(const char *source, const char *target,
|
||||
const char *filesystemtype, unsigned long mountflags,
|
||||
const void *data);
|
||||
int xumount2(const char *target, int flags);
|
||||
int xrename(const char *oldpath, const char *newpath);
|
||||
int xmkdir(const char *pathname, mode_t mode);
|
||||
int xmkdirs(const char *pathname, mode_t mode);
|
||||
ssize_t xsendfile(int out_fd, int in_fd, off_t *offset, size_t count);
|
||||
pid_t xfork();
|
||||
ssize_t xrealpath(const char * __restrict__ path, char * __restrict__ buf, size_t bufsiz);
|
||||
int xmknod(const char * pathname, mode_t mode, dev_t dev);
|
||||
|
||||
// Utils
|
||||
|
||||
int mkdirs(const char *path, mode_t mode);
|
||||
ssize_t canonical_path(const char * __restrict__ path, char * __restrict__ buf, size_t bufsiz);
|
||||
bool rm_rf(const char *path);
|
||||
bool cp_afc(const char *src, const char *dest);
|
||||
bool mv_path(const char *src, const char *dest);
|
||||
bool link_path(const char *src, const char *dest);
|
||||
bool clone_attr(const char *src, const char *dest);
|
||||
bool fclone_attr(int src, int dest);
|
||||
|
||||
} // extern "C"
|
||||
|
||||
#define DISALLOW_COPY_AND_MOVE(clazz) \
|
||||
clazz(const clazz&) = delete; \
|
||||
clazz(clazz &&) = delete;
|
||||
|
||||
#define ALLOW_MOVE_ONLY(clazz) \
|
||||
clazz(const clazz&) = delete; \
|
||||
clazz(clazz &&o) : clazz() { swap(o); } \
|
||||
clazz& operator=(clazz &&o) { swap(o); return *this; }
|
||||
|
||||
struct Utf8CStr;
|
||||
|
||||
class mutex_guard {
|
||||
DISALLOW_COPY_AND_MOVE(mutex_guard)
|
||||
public:
|
||||
explicit mutex_guard(pthread_mutex_t &m): mutex(&m) {
|
||||
pthread_mutex_lock(mutex);
|
||||
}
|
||||
void unlock() {
|
||||
pthread_mutex_unlock(mutex);
|
||||
mutex = nullptr;
|
||||
}
|
||||
~mutex_guard() {
|
||||
if (mutex) pthread_mutex_unlock(mutex);
|
||||
}
|
||||
private:
|
||||
pthread_mutex_t *mutex;
|
||||
};
|
||||
|
||||
template <class Func>
|
||||
class run_finally {
|
||||
DISALLOW_COPY_AND_MOVE(run_finally)
|
||||
public:
|
||||
explicit run_finally(Func &&fn) : fn(std::move(fn)) {}
|
||||
~run_finally() { fn(); }
|
||||
private:
|
||||
Func fn;
|
||||
};
|
||||
|
||||
template<class T>
|
||||
static void default_new(T *&p) { p = new T(); }
|
||||
|
||||
template<class T>
|
||||
static void default_new(std::unique_ptr<T> &p) { p.reset(new T()); }
|
||||
|
||||
struct StringCmp {
|
||||
using is_transparent = void;
|
||||
bool operator()(std::string_view a, std::string_view b) const { return a < b; }
|
||||
};
|
||||
|
||||
using ByteSlice = rust::Slice<const uint8_t>;
|
||||
using MutByteSlice = rust::Slice<uint8_t>;
|
||||
|
||||
// Interchangeable as `&[u8]` in Rust
|
||||
struct byte_view {
|
||||
byte_view() : ptr(nullptr), sz(0) {}
|
||||
byte_view(const void *buf, size_t sz) : ptr((uint8_t *) buf), sz(sz) {}
|
||||
|
||||
// byte_view, or any of its subclasses, can be copied as byte_view
|
||||
byte_view(const byte_view &o) : ptr(o.ptr), sz(o.sz) {}
|
||||
|
||||
// Transparent conversion to Rust slice
|
||||
byte_view(const ByteSlice o) : byte_view(o.data(), o.size()) {}
|
||||
operator ByteSlice() const { return {ptr, sz}; }
|
||||
|
||||
// String as bytes, including null terminator
|
||||
byte_view(const char *s) : byte_view(s, strlen(s) + 1) {}
|
||||
|
||||
const uint8_t *data() const { return ptr; }
|
||||
size_t size() const { return sz; }
|
||||
|
||||
protected:
|
||||
uint8_t *ptr;
|
||||
size_t sz;
|
||||
};
|
||||
|
||||
// Interchangeable as `&mut [u8]` in Rust
|
||||
struct byte_data : public byte_view {
|
||||
byte_data() = default;
|
||||
byte_data(void *buf, size_t sz) : byte_view(buf, sz) {}
|
||||
|
||||
// byte_data, or any of its subclasses, can be copied as byte_data
|
||||
byte_data(const byte_data &o) : byte_data(o.ptr, o.sz) {}
|
||||
|
||||
// Transparent conversion to Rust slice
|
||||
byte_data(const MutByteSlice o) : byte_data(o.data(), o.size()) {}
|
||||
operator MutByteSlice() const { return {ptr, sz}; }
|
||||
|
||||
using byte_view::data;
|
||||
uint8_t *data() const { return ptr; }
|
||||
|
||||
rust::Vec<size_t> patch(byte_view from, byte_view to) const;
|
||||
};
|
||||
|
||||
struct mmap_data : public byte_data {
|
||||
ALLOW_MOVE_ONLY(mmap_data)
|
||||
|
||||
mmap_data() = default;
|
||||
explicit mmap_data(const char *name, bool rw = false);
|
||||
mmap_data(int dirfd, const char *name, bool rw = false);
|
||||
mmap_data(int fd, size_t sz, bool rw = false);
|
||||
~mmap_data();
|
||||
private:
|
||||
void swap(mmap_data &o);
|
||||
};
|
||||
|
||||
|
||||
struct owned_fd {
|
||||
ALLOW_MOVE_ONLY(owned_fd)
|
||||
|
||||
owned_fd() : fd(-1) {}
|
||||
owned_fd(int fd) : fd(fd) {}
|
||||
~owned_fd() { close(fd); fd = -1; }
|
||||
|
||||
operator int() { return fd; }
|
||||
int release() { int f = fd; fd = -1; return f; }
|
||||
void swap(owned_fd &owned) { std::swap(fd, owned.fd); }
|
||||
|
||||
private:
|
||||
int fd;
|
||||
};
|
||||
|
||||
rust::Vec<size_t> mut_u8_patch(MutByteSlice buf, ByteSlice from, ByteSlice to);
|
||||
|
||||
uint32_t parse_uint32_hex(std::string_view s);
|
||||
int parse_int(std::string_view s);
|
||||
|
||||
using thread_entry = void *(*)(void *);
|
||||
extern "C" int new_daemon_thread(thread_entry entry, void *arg = nullptr);
|
||||
|
||||
static inline std::string rtrim(std::string &&s) {
|
||||
s.erase(std::find_if(s.rbegin(), s.rend(), [](unsigned char ch) {
|
||||
return !std::isspace(ch) && ch != '\0';
|
||||
}).base(), s.end());
|
||||
return std::move(s);
|
||||
}
|
||||
|
||||
int fork_dont_care();
|
||||
int fork_no_orphan();
|
||||
void init_argv0(int argc, char **argv);
|
||||
void set_nice_name(Utf8CStr name);
|
||||
int switch_mnt_ns(int pid);
|
||||
std::string &replace_all(std::string &str, std::string_view from, std::string_view to);
|
||||
std::vector<std::string> split(std::string_view s, std::string_view delims);
|
||||
|
||||
// Similar to vsnprintf, but the return value is the written number of bytes
|
||||
__printflike(3, 0) int vssprintf(char *dest, size_t size, const char *fmt, va_list ap);
|
||||
// Similar to snprintf, but the return value is the written number of bytes
|
||||
__printflike(3, 4) int ssprintf(char *dest, size_t size, const char *fmt, ...);
|
||||
// This is not actually the strscpy from the Linux kernel.
|
||||
// Silently truncates, and returns the number of bytes written.
|
||||
extern "C" size_t strscpy(char *dest, const char *src, size_t size);
|
||||
|
||||
// Ban usage of unsafe cstring functions
|
||||
#define vsnprintf __use_vssprintf_instead__
|
||||
#define snprintf __use_ssprintf_instead__
|
||||
#define strlcpy __use_strscpy_instead__
|
||||
|
||||
struct exec_t {
|
||||
bool err = false;
|
||||
int fd = -2;
|
||||
void (*pre_exec)() = nullptr;
|
||||
int (*fork)() = xfork;
|
||||
const char **argv = nullptr;
|
||||
};
|
||||
|
||||
int exec_command(exec_t &exec);
|
||||
template <class ...Args>
|
||||
int exec_command(exec_t &exec, Args &&...args) {
|
||||
const char *argv[] = {args..., nullptr};
|
||||
exec.argv = argv;
|
||||
return exec_command(exec);
|
||||
}
|
||||
int exec_command_sync(exec_t &exec);
|
||||
template <class ...Args>
|
||||
int exec_command_sync(exec_t &exec, Args &&...args) {
|
||||
const char *argv[] = {args..., nullptr};
|
||||
exec.argv = argv;
|
||||
return exec_command_sync(exec);
|
||||
}
|
||||
template <class ...Args>
|
||||
int exec_command_sync(Args &&...args) {
|
||||
exec_t exec;
|
||||
return exec_command_sync(exec, args...);
|
||||
}
|
||||
template <class ...Args>
|
||||
void exec_command_async(Args &&...args) {
|
||||
const char *argv[] = {args..., nullptr};
|
||||
exec_t exec {
|
||||
.fork = fork_dont_care,
|
||||
.argv = argv,
|
||||
};
|
||||
exec_command(exec);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
constexpr auto operator+(T e) noexcept ->
|
||||
std::enable_if_t<std::is_enum<T>::value, std::underlying_type_t<T>> {
|
||||
return static_cast<std::underlying_type_t<T>>(e);
|
||||
}
|
||||
|
||||
std::string full_read(int fd);
|
||||
std::string full_read(const char *filename);
|
||||
void write_zero(int fd, size_t size);
|
||||
std::string resolve_preinit_dir(const char *base_dir);
|
||||
|
||||
using sFILE = std::unique_ptr<FILE, decltype(&fclose)>;
|
||||
using sDIR = std::unique_ptr<DIR, decltype(&closedir)>;
|
||||
sDIR make_dir(DIR *dp);
|
||||
sFILE make_file(FILE *fp);
|
||||
|
||||
static inline sDIR open_dir(const char *path) {
|
||||
return make_dir(opendir(path));
|
||||
}
|
||||
|
||||
static inline sDIR xopen_dir(const char *path) {
|
||||
return make_dir(xopendir(path));
|
||||
}
|
||||
|
||||
static inline sDIR xopen_dir(int dirfd) {
|
||||
return make_dir(xfdopendir(dirfd));
|
||||
}
|
||||
|
||||
static inline sFILE open_file(const char *path, const char *mode) {
|
||||
return make_file(fopen(path, mode));
|
||||
}
|
||||
|
||||
static inline sFILE xopen_file(const char *path, const char *mode) {
|
||||
return make_file(xfopen(path, mode));
|
||||
}
|
||||
|
||||
static inline sFILE xopen_file(int fd, const char *mode) {
|
||||
return make_file(xfdopen(fd, mode));
|
||||
}
|
||||
|
||||
// Bindings to &Utf8CStr in Rust
|
||||
struct Utf8CStr {
|
||||
const char *data() const;
|
||||
size_t length() const;
|
||||
Utf8CStr(const char *s, size_t len);
|
||||
|
||||
Utf8CStr() : Utf8CStr("", 1) {};
|
||||
Utf8CStr(const Utf8CStr &o) = default;
|
||||
Utf8CStr(const char *s) : Utf8CStr(s, strlen(s) + 1) {};
|
||||
Utf8CStr(std::string s) : Utf8CStr(s.data(), s.length() + 1) {};
|
||||
const char *c_str() const { return this->data(); }
|
||||
size_t size() const { return this->length(); }
|
||||
bool empty() const { return this->length() == 0 ; }
|
||||
std::string_view sv() const { return {data(), length()}; }
|
||||
operator std::string_view() const { return sv(); }
|
||||
bool operator==(std::string_view rhs) const { return sv() == rhs; }
|
||||
|
||||
private:
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Wunused-private-field"
|
||||
std::array<std::uintptr_t, 2> repr;
|
||||
#pragma clang diagnostic pop
|
||||
};
|
||||
|
||||
// Bindings for std::function to be callable from Rust
|
||||
using CxxFnBoolStrStr = std::function<bool(rust::Str, rust::Str)>;
|
||||
struct FnBoolStrStr : public CxxFnBoolStrStr {
|
||||
using CxxFnBoolStrStr::function;
|
||||
bool call(rust::Str a, rust::Str b) const {
|
||||
return operator()(a, b);
|
||||
}
|
||||
};
|
||||
using CxxFnBoolStr = std::function<bool(Utf8CStr)>;
|
||||
struct FnBoolStr : public CxxFnBoolStr {
|
||||
using CxxFnBoolStr::function;
|
||||
bool call(Utf8CStr s) const {
|
||||
return operator()(s);
|
||||
}
|
||||
};
|
||||
|
||||
#include "../base-rs.hpp"
|
||||
|
||||
// Functor = function<bool(Utf8CStr, Utf8CStr)>
|
||||
template <typename Functor>
|
||||
void parse_prop_file(const char *file, Functor &&fn) {
|
||||
parse_prop_file_rs(file, [&](rust::Str key, rust::Str val) -> bool {
|
||||
// We perform the null termination here in C++ because it's very difficult to do it
|
||||
// right in Rust due to pointer provenance. Trying to dereference a pointer without
|
||||
// the correct provenance in Rust, even in unsafe code, is undefined behavior.
|
||||
// However on the C++ side, there are fewer restrictions on pointers, so the const_cast here
|
||||
// will not trigger UB in the compiler.
|
||||
*(const_cast<char *>(key.data()) + key.size()) = '\0';
|
||||
*(const_cast<char *>(val.data()) + val.size()) = '\0';
|
||||
return fn(Utf8CStr(key.data(), key.size() + 1), Utf8CStr(val.data(), val.size() + 1));
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
#![feature(vec_into_raw_parts)]
|
||||
#![allow(clippy::missing_safety_doc)]
|
||||
|
||||
pub use const_format;
|
||||
pub use libc;
|
||||
pub use nix;
|
||||
pub use {const_format, libc, nix};
|
||||
|
||||
pub use cstr::{
|
||||
FsPathFollow, StrErr, Utf8CStr, Utf8CStrBuf, Utf8CStrBufArr, Utf8CStrBufRef, Utf8CString,
|
||||
};
|
||||
use cxx_extern::*;
|
||||
pub use derive;
|
||||
pub use dir::*;
|
||||
pub use ffi::{Utf8CStrRef, fork_dont_care, set_nice_name};
|
||||
pub use files::*;
|
||||
@@ -16,6 +15,7 @@ pub use logging::*;
|
||||
pub use misc::*;
|
||||
pub use result::*;
|
||||
|
||||
pub mod argh;
|
||||
pub mod cstr;
|
||||
mod cxx_extern;
|
||||
mod dir;
|
||||
@@ -39,7 +39,7 @@ mod ffi {
|
||||
}
|
||||
|
||||
unsafe extern "C++" {
|
||||
include!("misc.hpp");
|
||||
include!("base.hpp");
|
||||
|
||||
#[cxx_name = "Utf8CStr"]
|
||||
type Utf8CStrRef<'a> = &'a crate::cstr::Utf8CStr;
|
||||
@@ -62,11 +62,11 @@ mod ffi {
|
||||
fn parse_prop_file_rs(name: Utf8CStrRef, f: &FnBoolStrStr);
|
||||
#[cxx_name = "file_readline"]
|
||||
fn file_readline_for_cxx(fd: i32, f: &FnBoolStr);
|
||||
fn xpipe2(fds: &mut [i32; 2], flags: i32) -> i32;
|
||||
}
|
||||
|
||||
#[namespace = "rust"]
|
||||
extern "Rust" {
|
||||
fn xpipe2(fds: &mut [i32; 2], flags: i32) -> i32;
|
||||
#[cxx_name = "map_file"]
|
||||
fn map_file_for_cxx(path: Utf8CStrRef, rw: bool) -> &'static mut [u8];
|
||||
#[cxx_name = "map_file_at"]
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include <cerrno>
|
||||
#include <cstdarg>
|
||||
|
||||
void LOGD(const char *fmt, ...) __printflike(1, 2);
|
||||
void LOGI(const char *fmt, ...) __printflike(1, 2);
|
||||
void LOGW(const char *fmt, ...) __printflike(1, 2);
|
||||
void LOGE(const char *fmt, ...) __printflike(1, 2);
|
||||
#define PLOGE(fmt, args...) LOGE(fmt " failed with %d: %s\n", ##args, errno, std::strerror(errno))
|
||||
@@ -1,250 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include <pthread.h>
|
||||
#include <string>
|
||||
#include <functional>
|
||||
#include <string_view>
|
||||
#include <bitset>
|
||||
#include <rust/cxx.h>
|
||||
|
||||
#include "xwrap.hpp"
|
||||
|
||||
#define DISALLOW_COPY_AND_MOVE(clazz) \
|
||||
clazz(const clazz&) = delete; \
|
||||
clazz(clazz &&) = delete;
|
||||
|
||||
#define ALLOW_MOVE_ONLY(clazz) \
|
||||
clazz(const clazz&) = delete; \
|
||||
clazz(clazz &&o) : clazz() { swap(o); } \
|
||||
clazz& operator=(clazz &&o) { swap(o); return *this; }
|
||||
|
||||
struct Utf8CStr;
|
||||
|
||||
class mutex_guard {
|
||||
DISALLOW_COPY_AND_MOVE(mutex_guard)
|
||||
public:
|
||||
explicit mutex_guard(pthread_mutex_t &m): mutex(&m) {
|
||||
pthread_mutex_lock(mutex);
|
||||
}
|
||||
void unlock() {
|
||||
pthread_mutex_unlock(mutex);
|
||||
mutex = nullptr;
|
||||
}
|
||||
~mutex_guard() {
|
||||
if (mutex) pthread_mutex_unlock(mutex);
|
||||
}
|
||||
private:
|
||||
pthread_mutex_t *mutex;
|
||||
};
|
||||
|
||||
template <class Func>
|
||||
class run_finally {
|
||||
DISALLOW_COPY_AND_MOVE(run_finally)
|
||||
public:
|
||||
explicit run_finally(Func &&fn) : fn(std::move(fn)) {}
|
||||
~run_finally() { fn(); }
|
||||
private:
|
||||
Func fn;
|
||||
};
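For comparison, the usual Rust counterpart of `run_finally` is a small `Drop` guard; a self-contained sketch (not taken from the codebase):

```rust
// Runs the stored closure when the guard goes out of scope, mirroring
// what run_finally does in its destructor.
struct RunFinally<F: FnOnce()>(Option<F>);

impl<F: FnOnce()> Drop for RunFinally<F> {
    fn drop(&mut self) {
        if let Some(f) = self.0.take() {
            f();
        }
    }
}

fn main() {
    let _guard = RunFinally(Some(|| println!("cleanup")));
    println!("work"); // "cleanup" is printed afterwards, even on early return
}
```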
template<class T>
|
||||
static void default_new(T *&p) { p = new T(); }
|
||||
|
||||
template<class T>
|
||||
static void default_new(std::unique_ptr<T> &p) { p.reset(new T()); }
|
||||
|
||||
struct StringCmp {
|
||||
using is_transparent = void;
|
||||
bool operator()(std::string_view a, std::string_view b) const { return a < b; }
|
||||
};
|
||||
|
||||
struct heap_data;
|
||||
|
||||
using ByteSlice = rust::Slice<const uint8_t>;
|
||||
using MutByteSlice = rust::Slice<uint8_t>;
|
||||
|
||||
// Interchangeable as `&[u8]` in Rust
|
||||
struct byte_view {
|
||||
byte_view() : _buf(nullptr), _sz(0) {}
|
||||
byte_view(const void *buf, size_t sz) : _buf((uint8_t *) buf), _sz(sz) {}
|
||||
|
||||
// byte_view, or any of its subclasses, can be copied as byte_view
|
||||
byte_view(const byte_view &o) : _buf(o._buf), _sz(o._sz) {}
|
||||
|
||||
// Transparent conversion to Rust slice
|
||||
byte_view(const ByteSlice o) : byte_view(o.data(), o.size()) {}
|
||||
operator ByteSlice() const { return {_buf, _sz}; }
|
||||
|
||||
// String as bytes, including null terminator
|
||||
byte_view(const char *s) : byte_view(s, strlen(s) + 1) {}
|
||||
|
||||
const uint8_t *data() const { return _buf; }
|
||||
size_t size() const { return _sz; }
|
||||
bool contains(byte_view pattern) const;
|
||||
bool operator==(byte_view rhs) const;
|
||||
|
||||
protected:
|
||||
uint8_t *_buf;
|
||||
size_t _sz;
|
||||
};
|
||||
|
||||
// Interchangeable as `&mut [u8]` in Rust
|
||||
struct byte_data : public byte_view {
|
||||
byte_data() = default;
|
||||
byte_data(void *buf, size_t sz) : byte_view(buf, sz) {}
|
||||
|
||||
// byte_data, or any of its subclasses, can be copied as byte_data
|
||||
byte_data(const byte_data &o) : byte_data(o._buf, o._sz) {}
|
||||
|
||||
// Transparent conversion to Rust slice
|
||||
byte_data(const MutByteSlice o) : byte_data(o.data(), o.size()) {}
|
||||
operator MutByteSlice() const { return {_buf, _sz}; }
|
||||
|
||||
using byte_view::data;
|
||||
uint8_t *data() const { return _buf; }
|
||||
|
||||
void swap(byte_data &o);
|
||||
rust::Vec<size_t> patch(byte_view from, byte_view to) const;
|
||||
};
|
||||
|
||||
struct heap_data : public byte_data {
|
||||
ALLOW_MOVE_ONLY(heap_data)
|
||||
|
||||
heap_data() = default;
|
||||
explicit heap_data(size_t sz) : byte_data(calloc(sz, 1), sz) {}
|
||||
~heap_data() { free(_buf); }
|
||||
};
|
||||
|
||||
struct owned_fd {
|
||||
ALLOW_MOVE_ONLY(owned_fd)
|
||||
|
||||
owned_fd() : fd(-1) {}
|
||||
owned_fd(int fd) : fd(fd) {}
|
||||
~owned_fd() { close(fd); fd = -1; }
|
||||
|
||||
operator int() { return fd; }
|
||||
int release() { int f = fd; fd = -1; return f; }
|
||||
void swap(owned_fd &owned) { std::swap(fd, owned.fd); }
|
||||
|
||||
private:
|
||||
int fd;
|
||||
};
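The Rust standard library's equivalent of this RAII wrapper is `std::os::fd::OwnedFd`, which also closes on drop and can hand the raw descriptor back; a short sketch for comparison:

```rust
use std::os::fd::{FromRawFd, IntoRawFd, OwnedFd, RawFd};

// Adopt a raw descriptor; it is closed automatically when dropped.
fn adopt(fd: RawFd) -> OwnedFd {
    // SAFETY: the caller must pass a descriptor it exclusively owns.
    unsafe { OwnedFd::from_raw_fd(fd) }
}

// Counterpart of owned_fd::release(): give up ownership without closing.
fn release(fd: OwnedFd) -> RawFd {
    fd.into_raw_fd()
}
```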
rust::Vec<size_t> mut_u8_patch(MutByteSlice buf, ByteSlice from, ByteSlice to);
|
||||
|
||||
uint32_t parse_uint32_hex(std::string_view s);
|
||||
int parse_int(std::string_view s);
|
||||
|
||||
using thread_entry = void *(*)(void *);
|
||||
extern "C" int new_daemon_thread(thread_entry entry, void *arg = nullptr);
|
||||
|
||||
static inline std::string rtrim(std::string &&s) {
|
||||
s.erase(std::find_if(s.rbegin(), s.rend(), [](unsigned char ch) {
|
||||
return !std::isspace(ch) && ch != '\0';
|
||||
}).base(), s.end());
|
||||
return std::move(s);
|
||||
}
|
||||
|
||||
int fork_dont_care();
|
||||
int fork_no_orphan();
|
||||
void init_argv0(int argc, char **argv);
|
||||
void set_nice_name(Utf8CStr name);
|
||||
int switch_mnt_ns(int pid);
|
||||
std::string &replace_all(std::string &str, std::string_view from, std::string_view to);
|
||||
std::vector<std::string> split(std::string_view s, std::string_view delims);
|
||||
|
||||
// Similar to vsnprintf, but the return value is the written number of bytes
|
||||
__printflike(3, 0) int vssprintf(char *dest, size_t size, const char *fmt, va_list ap);
|
||||
// Similar to snprintf, but the return value is the written number of bytes
|
||||
__printflike(3, 4) int ssprintf(char *dest, size_t size, const char *fmt, ...);
|
||||
// This is not actually the strscpy from the Linux kernel.
|
||||
// Silently truncates, and returns the number of bytes written.
|
||||
extern "C" size_t strscpy(char *dest, const char *src, size_t size);
|
||||
|
||||
// Ban usage of unsafe cstring functions
|
||||
#define vsnprintf __use_vssprintf_instead__
|
||||
#define snprintf __use_ssprintf_instead__
|
||||
#define strlcpy __use_strscpy_instead__
|
||||
|
||||
struct exec_t {
|
||||
bool err = false;
|
||||
int fd = -2;
|
||||
void (*pre_exec)() = nullptr;
|
||||
int (*fork)() = xfork;
|
||||
const char **argv = nullptr;
|
||||
};
|
||||
|
||||
int exec_command(exec_t &exec);
|
||||
template <class ...Args>
|
||||
int exec_command(exec_t &exec, Args &&...args) {
|
||||
const char *argv[] = {args..., nullptr};
|
||||
exec.argv = argv;
|
||||
return exec_command(exec);
|
||||
}
|
||||
int exec_command_sync(exec_t &exec);
|
||||
template <class ...Args>
|
||||
int exec_command_sync(exec_t &exec, Args &&...args) {
|
||||
const char *argv[] = {args..., nullptr};
|
||||
exec.argv = argv;
|
||||
return exec_command_sync(exec);
|
||||
}
|
||||
template <class ...Args>
|
||||
int exec_command_sync(Args &&...args) {
|
||||
exec_t exec;
|
||||
return exec_command_sync(exec, args...);
|
||||
}
|
||||
template <class ...Args>
|
||||
void exec_command_async(Args &&...args) {
|
||||
const char *argv[] = {args..., nullptr};
|
||||
exec_t exec {
|
||||
.fork = fork_dont_care,
|
||||
.argv = argv,
|
||||
};
|
||||
exec_command(exec);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
constexpr auto operator+(T e) noexcept ->
|
||||
std::enable_if_t<std::is_enum<T>::value, std::underlying_type_t<T>> {
|
||||
return static_cast<std::underlying_type_t<T>>(e);
|
||||
}
|
||||
|
||||
struct Utf8CStr {
|
||||
const char *data() const;
|
||||
size_t length() const;
|
||||
Utf8CStr(const char *s, size_t len);
|
||||
|
||||
Utf8CStr() : Utf8CStr("", 1) {};
|
||||
Utf8CStr(const Utf8CStr &o) = default;
|
||||
Utf8CStr(const char *s) : Utf8CStr(s, strlen(s) + 1) {};
|
||||
Utf8CStr(std::string s) : Utf8CStr(s.data(), s.length() + 1) {};
|
||||
const char *c_str() const { return this->data(); }
|
||||
size_t size() const { return this->length(); }
|
||||
bool empty() const { return this->length() == 0 ; }
|
||||
std::string_view sv() const { return {data(), length()}; }
|
||||
operator std::string_view() const { return sv(); }
|
||||
bool operator==(std::string_view rhs) const { return sv() == rhs; }
|
||||
|
||||
private:
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Wunused-private-field"
|
||||
std::array<std::uintptr_t, 2> repr;
|
||||
#pragma clang diagnostic pop
|
||||
};
|
||||
|
||||
// Bindings for std::function to be callable from Rust
|
||||
|
||||
using CxxFnBoolStrStr = std::function<bool(rust::Str, rust::Str)>;
|
||||
struct FnBoolStrStr : public CxxFnBoolStrStr {
|
||||
using CxxFnBoolStrStr::function;
|
||||
bool call(rust::Str a, rust::Str b) const {
|
||||
return operator()(a, b);
|
||||
}
|
||||
};
|
||||
using CxxFnBoolStr = std::function<bool(Utf8CStr)>;
|
||||
struct FnBoolStr : public CxxFnBoolStr {
|
||||
using CxxFnBoolStr::function;
|
||||
bool call(Utf8CStr s) const {
|
||||
return operator()(s);
|
||||
}
|
||||
};
|
||||
@@ -1,16 +1,13 @@
|
||||
use super::argh::{EarlyExit, MissingRequirements};
|
||||
use crate::{Utf8CStr, Utf8CString, cstr, ffi};
|
||||
use argh::{EarlyExit, MissingRequirements};
|
||||
use libc::c_char;
|
||||
use std::{
|
||||
fmt,
|
||||
fmt::Arguments,
|
||||
io::Write,
|
||||
mem::ManuallyDrop,
|
||||
process::exit,
|
||||
slice, str,
|
||||
sync::Arc,
|
||||
sync::atomic::{AtomicPtr, Ordering},
|
||||
};
|
||||
use std::fmt::Arguments;
|
||||
use std::io::Write;
|
||||
use std::mem::ManuallyDrop;
|
||||
use std::process::exit;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicPtr, Ordering};
|
||||
use std::{fmt, slice, str};
|
||||
|
||||
pub fn errno() -> &'static mut i32 {
|
||||
unsafe { &mut *libc::__errno() }
|
||||
@@ -87,17 +84,16 @@ impl<T> EarlyExitExt<T> for Result<T, EarlyExit> {
|
||||
fn on_early_exit<F: FnOnce()>(self, print_help_msg: F) -> T {
|
||||
match self {
|
||||
Ok(t) => t,
|
||||
Err(EarlyExit { output, status }) => match status {
|
||||
Ok(_) => {
|
||||
Err(EarlyExit { output, is_help }) => {
|
||||
if is_help {
|
||||
print_help_msg();
|
||||
exit(0)
|
||||
}
|
||||
Err(_) => {
|
||||
} else {
|
||||
eprintln!("{output}");
|
||||
print_help_msg();
|
||||
exit(1)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -136,13 +132,9 @@ impl PositionalArgParser<'_> {
|
||||
}
|
||||
|
||||
fn ensure_end(&mut self) -> Result<(), EarlyExit> {
|
||||
if self.0.len() == 0 {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(EarlyExit::from(format!(
|
||||
"Unrecognized argument: {}\n",
|
||||
self.0.next().unwrap()
|
||||
)))
|
||||
match self.0.next() {
|
||||
None => Ok(()),
|
||||
Some(s) => Err(EarlyExit::from(format!("Unrecognized argument: {s}\n"))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include <unistd.h>
|
||||
#include <dirent.h>
|
||||
#include <stdio.h>
|
||||
#include <poll.h>
|
||||
#include <fcntl.h>
|
||||
|
||||
extern "C" {
|
||||
|
||||
FILE *xfopen(const char *pathname, const char *mode);
|
||||
FILE *xfdopen(int fd, const char *mode);
|
||||
int xopen(const char *pathname, int flags, mode_t mode = 0);
|
||||
int xopenat(int dirfd, const char *pathname, int flags, mode_t mode = 0);
|
||||
ssize_t xwrite(int fd, const void *buf, size_t count);
|
||||
ssize_t xread(int fd, void *buf, size_t count);
|
||||
ssize_t xxread(int fd, void *buf, size_t count);
|
||||
int xsetns(int fd, int nstype);
|
||||
int xunshare(int flags);
|
||||
DIR *xopendir(const char *name);
|
||||
DIR *xfdopendir(int fd);
|
||||
dirent *xreaddir(DIR *dirp);
|
||||
pid_t xsetsid();
|
||||
int xstat(const char *pathname, struct stat *buf);
|
||||
int xfstat(int fd, struct stat *buf);
|
||||
int xdup2(int oldfd, int newfd);
|
||||
ssize_t xreadlink(const char * __restrict__ pathname, char * __restrict__ buf, size_t bufsiz);
|
||||
ssize_t xreadlinkat(
|
||||
int dirfd, const char * __restrict__ pathname, char * __restrict__ buf, size_t bufsiz);
|
||||
int xsymlink(const char *target, const char *linkpath);
|
||||
int xmount(const char *source, const char *target,
|
||||
const char *filesystemtype, unsigned long mountflags,
|
||||
const void *data);
|
||||
int xumount2(const char *target, int flags);
|
||||
int xrename(const char *oldpath, const char *newpath);
|
||||
int xmkdir(const char *pathname, mode_t mode);
|
||||
int xmkdirs(const char *pathname, mode_t mode);
|
||||
ssize_t xsendfile(int out_fd, int in_fd, off_t *offset, size_t count);
|
||||
pid_t xfork();
|
||||
ssize_t xrealpath(const char * __restrict__ path, char * __restrict__ buf, size_t bufsiz);
|
||||
int xmknod(const char * pathname, mode_t mode, dev_t dev);
|
||||
|
||||
} // extern "C"
|
||||
@@ -35,21 +35,6 @@ unsafe extern "C" fn xrealpath(path: *const c_char, buf: *mut u8, bufsz: usize)
|
||||
}
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
unsafe extern "C" fn xreadlink(path: *const c_char, buf: *mut u8, bufsz: usize) -> isize {
|
||||
unsafe {
|
||||
match Utf8CStr::from_ptr(path) {
|
||||
Ok(path) => {
|
||||
let mut buf = cstr::buf::wrap_ptr(buf, bufsz);
|
||||
path.read_link(&mut buf)
|
||||
.log()
|
||||
.map_or(-1, |_| buf.len() as isize)
|
||||
}
|
||||
Err(_) => -1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
unsafe extern "C" fn xreadlinkat(
|
||||
dirfd: RawFd,
|
||||
@@ -201,16 +186,6 @@ extern "C" fn xsetsid() -> i32 {
|
||||
}
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
unsafe extern "C" fn xstat(path: *const c_char, buf: *mut libc::stat) -> i32 {
|
||||
unsafe {
|
||||
libc::stat(path, buf)
|
||||
.into_os_result("stat", ptr_to_str(path), None)
|
||||
.log()
|
||||
.unwrap_or(-1)
|
||||
}
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
unsafe extern "C" fn xfstat(fd: RawFd, buf: *mut libc::stat) -> i32 {
|
||||
unsafe {
|
||||
|
||||
@@ -7,17 +7,19 @@ edition.workspace = true
|
||||
crate-type = ["staticlib"]
|
||||
path = "lib.rs"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
cxx-gen = { workspace = true }
|
||||
pb-rs = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
base = { path = "../base" }
|
||||
base = { workspace = true }
|
||||
cxx = { workspace = true }
|
||||
byteorder = { workspace = true }
|
||||
size = { workspace = true }
|
||||
quick-protobuf = { workspace = true }
|
||||
argh = { workspace = true }
|
||||
sha1 = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
digest = { workspace = true }
|
||||
|
||||
@@ -53,6 +53,19 @@ static bool check_env(const char *name) {
|
||||
return val != nullptr && val == "true"sv;
|
||||
}
|
||||
|
||||
static bool guess_lzma(const uint8_t *buf, size_t len) {
|
||||
// 0 : (pb * 5 + lp) * 9 + lc
|
||||
// 1 - 4 : dict size, must be 2^n
|
||||
// 5 - 12: all 0xFF
|
||||
if (len <= 13) return false;
|
||||
if (memcmp(buf, "\x5d", 1) != 0) return false;
|
||||
uint32_t dict_sz = 0;
|
||||
memcpy(&dict_sz, buf + 1, sizeof(dict_sz));
|
||||
if (dict_sz == 0 || (dict_sz & (dict_sz - 1)) != 0) return false;
|
||||
if (memcmp(buf + 5, "\xff\xff\xff\xff\xff\xff\xff\xff", 8) != 0) return false;
|
||||
return true;
|
||||
}
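The same heuristic written out in Rust, for readers following the header layout described in the comment (a standalone sketch, not the project's code):

```rust
// LZMA_alone header: a props byte (0x5d is the common value), a little-endian
// dictionary size that must be a power of two, then eight 0xFF bytes for the
// "unknown size" field.
fn looks_like_lzma(buf: &[u8]) -> bool {
    if buf.len() <= 13 || buf[0] != 0x5d {
        return false;
    }
    let dict_sz = u32::from_le_bytes([buf[1], buf[2], buf[3], buf[4]]);
    dict_sz.is_power_of_two() && buf[5..13].iter().all(|&b| b == 0xff)
}
```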
FileFormat check_fmt(const void *buf, size_t len) {
|
||||
if (CHECKED_MATCH(CHROMEOS_MAGIC)) {
|
||||
return FileFormat::CHROMEOS;
|
||||
@@ -66,8 +79,7 @@ FileFormat check_fmt(const void *buf, size_t len) {
|
||||
return FileFormat::LZOP;
|
||||
} else if (CHECKED_MATCH(XZ_MAGIC)) {
|
||||
return FileFormat::XZ;
|
||||
} else if (len >= 13 && memcmp(buf, "\x5d\x00\x00", 3) == 0
|
||||
&& (((char *)buf)[12] == '\xff' || ((char *)buf)[12] == '\x00')) {
|
||||
} else if (guess_lzma(static_cast<const uint8_t *>(buf), len)) {
|
||||
return FileFormat::LZMA;
|
||||
} else if (CHECKED_MATCH(BZIP_MAGIC)) {
|
||||
return FileFormat::BZIP2;
|
||||
@@ -279,9 +291,10 @@ static int find_dtb_offset(const uint8_t *buf, unsigned sz) {
|
||||
|
||||
auto fdt_hdr = reinterpret_cast<const fdt_header *>(curr);
|
||||
|
||||
// Check that fdt_header.totalsize does not overflow kernel image size
|
||||
// Check that fdt_header.totalsize does not overflow kernel image size or is empty dtb
|
||||
// https://github.com/torvalds/linux/commit/7b937cc243e5b1df8780a0aa743ce800df6c68d1
|
||||
uint32_t totalsize = fdt_hdr->totalsize;
|
||||
if (totalsize > end - curr)
|
||||
if (totalsize > end - curr || totalsize <= 0x48)
|
||||
continue;
|
||||
|
||||
// Check that fdt_header.off_dt_struct does not overflow kernel image size
|
||||
@@ -598,7 +611,9 @@ bool boot_img::parse_image(const uint8_t *addr, FileFormat type) {
|
||||
int split_image_dtb(Utf8CStr filename, bool skip_decomp) {
|
||||
mmap_data img(filename.c_str());
|
||||
|
||||
if (size_t off = find_dtb_offset(img.data(), img.size()); off > 0) {
|
||||
if (int offset = find_dtb_offset(img.data(), img.size()); offset > 0) {
|
||||
size_t off = (size_t) offset;
|
||||
|
||||
FileFormat fmt = check_fmt_lg(img.data(), img.size());
|
||||
if (!skip_decomp && fmt_compressed(fmt)) {
|
||||
int fd = creat(KERNEL_FILE, 0644);
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use pb_rs::{ConfigBuilder, types::FileDescriptor};
|
||||
use pb_rs::ConfigBuilder;
|
||||
use pb_rs::types::FileDescriptor;
|
||||
|
||||
use crate::codegen::gen_cxx_binding;
|
||||
|
||||
#[path = "../include/codegen.rs"]
|
||||
mod codegen;
|
||||
|
||||
#[allow(clippy::unwrap_used)]
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=proto/update_metadata.proto");
|
||||
|
||||
|
||||
@@ -6,9 +6,11 @@ use crate::patch::hexpatch;
|
||||
use crate::payload::extract_boot_from_payload;
|
||||
use crate::sign::{sha1_hash, sign_boot_image};
|
||||
use argh::{CommandInfo, EarlyExit, FromArgs, SubCommand};
|
||||
use base::libc::umask;
|
||||
use base::nix::fcntl::OFlag;
|
||||
use base::{
|
||||
CmdArgs, EarlyExitExt, LoggedResult, MappedFile, PositionalArgParser, ResultExt, Utf8CStr,
|
||||
Utf8CString, WriteExt, cmdline_logging, cstr, libc::umask, log_err, nix::fcntl::OFlag,
|
||||
Utf8CString, WriteExt, argh, cmdline_logging, cstr, log_err,
|
||||
};
|
||||
use std::ffi::c_char;
|
||||
use std::io::{Seek, SeekFrom, Write};
|
||||
@@ -41,9 +43,9 @@ enum Action {
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand, name = "unpack")]
|
||||
struct Unpack {
|
||||
#[argh(switch, short = 'n')]
|
||||
#[argh(switch, short = 'n', long = none)]
|
||||
no_decompress: bool,
|
||||
#[argh(switch, short = 'h')]
|
||||
#[argh(switch, short = 'h', long = none)]
|
||||
dump_header: bool,
|
||||
#[argh(positional)]
|
||||
img: Utf8CString,
|
||||
@@ -52,12 +54,12 @@ struct Unpack {
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand, name = "repack")]
|
||||
struct Repack {
|
||||
#[argh(switch, short = 'n')]
|
||||
#[argh(switch, short = 'n', long = none)]
|
||||
no_compress: bool,
|
||||
#[argh(positional)]
|
||||
img: Utf8CString,
|
||||
#[argh(positional, default = r#"Utf8CString::from("new-boot.img")"#)]
|
||||
out: Utf8CString,
|
||||
#[argh(positional)]
|
||||
out: Option<Utf8CString>,
|
||||
}
|
||||
|
||||
#[derive(FromArgs)]
|
||||
@@ -75,33 +77,24 @@ struct Sign {
|
||||
#[argh(positional)]
|
||||
img: Utf8CString,
|
||||
#[argh(positional)]
|
||||
args: Vec<Utf8CString>,
|
||||
name: Option<Utf8CString>,
|
||||
#[argh(positional)]
|
||||
cert: Option<Utf8CString>,
|
||||
#[argh(positional)]
|
||||
key: Option<Utf8CString>,
|
||||
}
|
||||
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand, name = "extract")]
|
||||
struct Extract {
|
||||
#[argh(positional)]
|
||||
payload: Utf8CString,
|
||||
#[argh(positional)]
|
||||
partition: Option<Utf8CString>,
|
||||
#[argh(positional)]
|
||||
outfile: Option<Utf8CString>,
|
||||
}
|
||||
|
||||
impl FromArgs for Extract {
|
||||
fn from_args(_command_name: &[&str], args: &[&str]) -> Result<Self, EarlyExit> {
|
||||
let mut parse = PositionalArgParser(args.iter());
|
||||
Ok(Extract {
|
||||
payload: parse.required("payload.bin")?,
|
||||
partition: parse.optional(),
|
||||
outfile: parse.last_optional()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl SubCommand for Extract {
|
||||
const COMMAND: &'static CommandInfo = &CommandInfo {
|
||||
name: "extract",
|
||||
description: "",
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand, name = "hexpatch")]
|
||||
struct HexPatch {
|
||||
@@ -134,7 +127,7 @@ struct Dtb {
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand, name = "split")]
|
||||
struct Split {
|
||||
#[argh(switch, short = 'n')]
|
||||
#[argh(switch, short = 'n', long = none)]
|
||||
no_decompress: bool,
|
||||
#[argh(positional)]
|
||||
file: Utf8CString,
|
||||
@@ -184,28 +177,15 @@ impl SubCommand for Compress {
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand, name = "decompress")]
|
||||
struct Decompress {
|
||||
#[argh(positional)]
|
||||
file: Utf8CString,
|
||||
#[argh(positional)]
|
||||
out: Option<Utf8CString>,
|
||||
}
|
||||
|
||||
impl FromArgs for Decompress {
|
||||
fn from_args(_command_name: &[&str], args: &[&str]) -> Result<Self, EarlyExit> {
|
||||
let mut iter = PositionalArgParser(args.iter());
|
||||
Ok(Decompress {
|
||||
file: iter.required("infile")?,
|
||||
out: iter.last_optional()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl SubCommand for Decompress {
|
||||
const COMMAND: &'static CommandInfo = &CommandInfo {
|
||||
name: "decompress",
|
||||
description: "",
|
||||
};
|
||||
}
|
||||
|
||||
fn print_usage(cmd: &str) {
|
||||
eprintln!(
|
||||
r#"MagiskBoot - Boot Image Modification Tool
|
||||
@@ -225,7 +205,7 @@ Supported actions:
|
||||
dumped to the file 'header', which can be used to modify header
|
||||
configurations during repacking.
|
||||
Return values:
|
||||
0:valid 1:error 2:chromeos
|
||||
0:valid 1:error 2:chromeos 3:vendor_boot
|
||||
|
||||
repack [-n] <origbootimg> [outbootimg]
|
||||
Repack boot image components using files from the current directory
|
||||
@@ -384,21 +364,24 @@ fn boot_main(cmds: CmdArgs) -> LoggedResult<i32> {
|
||||
img,
|
||||
out,
|
||||
}) => {
|
||||
repack(&img, &out, no_compress);
|
||||
repack(
|
||||
&img,
|
||||
out.as_deref().unwrap_or(cstr!("new-boot.img")),
|
||||
no_compress,
|
||||
);
|
||||
}
|
||||
Action::Verify(Verify { img, cert }) => {
|
||||
if !verify_cmd(&img, cert.as_deref()) {
|
||||
return log_err!();
|
||||
}
|
||||
}
|
||||
Action::Sign(Sign { img, args }) => {
|
||||
let mut iter = args.iter();
|
||||
sign_cmd(
|
||||
&img,
|
||||
iter.next().map(AsRef::as_ref),
|
||||
iter.next().map(AsRef::as_ref),
|
||||
iter.next().map(AsRef::as_ref),
|
||||
)?;
|
||||
Action::Sign(Sign {
|
||||
img,
|
||||
name,
|
||||
cert,
|
||||
key,
|
||||
}) => {
|
||||
sign_cmd(&img, name.as_deref(), cert.as_deref(), key.as_deref())?;
|
||||
}
|
||||
Action::Extract(Extract {
|
||||
payload,
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
use crate::ffi::{FileFormat, check_fmt};
|
||||
use base::{
|
||||
Chunker, FileOrStd, LoggedResult, ReadExt, ResultExt, Utf8CStr, Utf8CString, WriteExt, log_err,
|
||||
nix::fcntl::OFlag,
|
||||
};
|
||||
use bzip2::{Compression as BzCompression, read::BzDecoder, write::BzEncoder};
|
||||
use flate2::{Compression as GzCompression, read::MultiGzDecoder, write::GzEncoder};
|
||||
use base::nix::fcntl::OFlag;
|
||||
use base::{Chunker, FileOrStd, LoggedResult, ReadExt, Utf8CStr, Utf8CString, WriteExt, log_err};
|
||||
use bzip2::Compression as BzCompression;
|
||||
use bzip2::read::BzDecoder;
|
||||
use bzip2::write::BzEncoder;
|
||||
use flate2::Compression as GzCompression;
|
||||
use flate2::read::MultiGzDecoder;
|
||||
use flate2::write::GzEncoder;
|
||||
use lz4::block::CompressionMode;
|
||||
use lz4::liblz4::BlockChecksum;
|
||||
use lz4::{
|
||||
BlockMode, BlockSize, ContentChecksum, Decoder as LZ4FrameDecoder, Encoder as LZ4FrameEncoder,
|
||||
EncoderBuilder as LZ4FrameEncoderBuilder, block::CompressionMode, liblz4::BlockChecksum,
|
||||
EncoderBuilder as LZ4FrameEncoderBuilder,
|
||||
};
|
||||
use lzma_rust2::{CheckType, LzmaOptions, LzmaReader, LzmaWriter, XzOptions, XzReader, XzWriter};
|
||||
use std::cmp::min;
|
||||
@@ -212,16 +216,21 @@ impl<R: Read> Read for LZ4BlockDecoder<R> {
|
||||
|
||||
// Top-level APIs
|
||||
|
||||
pub fn get_encoder<'a, W: Write + 'a>(format: FileFormat, w: W) -> Box<dyn WriteFinish<W> + 'a> {
|
||||
match format {
|
||||
pub fn get_encoder<'a, W: Write + 'a>(
|
||||
format: FileFormat,
|
||||
w: W,
|
||||
) -> std::io::Result<Box<dyn WriteFinish<W> + 'a>> {
|
||||
Ok(match format {
|
||||
FileFormat::XZ => {
|
||||
let mut opt = XzOptions::with_preset(9);
|
||||
opt.set_check_sum_type(CheckType::Crc32);
|
||||
Box::new(XzWriter::new(w, opt).unwrap())
|
||||
}
|
||||
FileFormat::LZMA => {
|
||||
Box::new(LzmaWriter::new_use_header(w, &LzmaOptions::with_preset(9), None).unwrap())
|
||||
Box::new(XzWriter::new(w, opt)?)
|
||||
}
|
||||
FileFormat::LZMA => Box::new(LzmaWriter::new_use_header(
|
||||
w,
|
||||
&LzmaOptions::with_preset(9),
|
||||
None,
|
||||
)?),
|
||||
FileFormat::BZIP2 => Box::new(BzEncoder::new(w, BzCompression::best())),
|
||||
FileFormat::LZ4 => {
|
||||
let encoder = LZ4FrameEncoderBuilder::new()
|
||||
@@ -231,8 +240,7 @@ pub fn get_encoder<'a, W: Write + 'a>(format: FileFormat, w: W) -> Box<dyn Write
|
||||
.block_checksum(BlockChecksum::BlockChecksumEnabled)
|
||||
.level(9)
|
||||
.auto_flush(true)
|
||||
.build(w)
|
||||
.unwrap();
|
||||
.build(w)?;
|
||||
Box::new(encoder)
|
||||
}
|
||||
FileFormat::LZ4_LEGACY => Box::new(LZ4BlockEncoder::new(w, false)),
|
||||
@@ -240,27 +248,30 @@ pub fn get_encoder<'a, W: Write + 'a>(format: FileFormat, w: W) -> Box<dyn Write
|
||||
FileFormat::ZOPFLI => {
|
||||
// These options are already better than gzip -9
|
||||
let opt = ZopfliOptions {
|
||||
iteration_count: NonZeroU64::new(1).unwrap(),
|
||||
iteration_count: unsafe { NonZeroU64::new_unchecked(1) },
|
||||
maximum_block_splits: 1,
|
||||
..Default::default()
|
||||
};
|
||||
Box::new(ZopFliEncoder::new_buffered(opt, BlockType::Dynamic, w).unwrap())
|
||||
Box::new(ZopFliEncoder::new_buffered(opt, BlockType::Dynamic, w)?)
|
||||
}
|
||||
FileFormat::GZIP => Box::new(GzEncoder::new(w, GzCompression::best())),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_decoder<'a, R: Read + 'a>(format: FileFormat, r: R) -> Box<dyn Read + 'a> {
|
||||
match format {
|
||||
pub fn get_decoder<'a, R: Read + 'a>(
|
||||
format: FileFormat,
|
||||
r: R,
|
||||
) -> std::io::Result<Box<dyn Read + 'a>> {
|
||||
Ok(match format {
|
||||
FileFormat::XZ => Box::new(XzReader::new(r, true)),
|
||||
FileFormat::LZMA => Box::new(LzmaReader::new_mem_limit(r, u32::MAX, None).unwrap()),
|
||||
FileFormat::LZMA => Box::new(LzmaReader::new_mem_limit(r, u32::MAX, None)?),
|
||||
FileFormat::BZIP2 => Box::new(BzDecoder::new(r)),
|
||||
FileFormat::LZ4 => Box::new(LZ4FrameDecoder::new(r).unwrap()),
|
||||
FileFormat::LZ4 => Box::new(LZ4FrameDecoder::new(r)?),
|
||||
FileFormat::LZ4_LG | FileFormat::LZ4_LEGACY => Box::new(LZ4BlockDecoder::new(r)),
|
||||
FileFormat::ZOPFLI | FileFormat::GZIP => Box::new(MultiGzDecoder::new(r)),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
})
|
||||
}
|
|
||||
@@ -268,9 +279,9 @@ pub fn get_decoder<'a, R: Read + 'a>(format: FileFormat, r: R) -> Box<dyn Read +
|
||||
pub fn compress_bytes(format: FileFormat, in_bytes: &[u8], out_fd: RawFd) {
|
||||
let mut out_file = unsafe { ManuallyDrop::new(File::from_raw_fd(out_fd)) };
|
||||
|
||||
let mut encoder = get_encoder(format, out_file.deref_mut());
|
||||
let _: LoggedResult<()> = try {
|
||||
encoder.write_all(in_bytes)?;
|
||||
let mut encoder = get_encoder(format, out_file.deref_mut())?;
|
||||
std::io::copy(&mut Cursor::new(in_bytes), encoder.deref_mut())?;
|
||||
encoder.finish()?;
|
||||
};
|
||||
}
|
||||
@@ -278,8 +289,10 @@ pub fn compress_bytes(format: FileFormat, in_bytes: &[u8], out_fd: RawFd) {
|
||||
pub fn decompress_bytes(format: FileFormat, in_bytes: &[u8], out_fd: RawFd) {
|
||||
let mut out_file = unsafe { ManuallyDrop::new(File::from_raw_fd(out_fd)) };
|
||||
|
||||
let mut decoder = get_decoder(format, in_bytes);
|
||||
std::io::copy(decoder.as_mut(), out_file.deref_mut()).log_ok();
|
||||
let _: LoggedResult<()> = try {
|
||||
let mut decoder = get_decoder(format, in_bytes)?;
|
||||
std::io::copy(decoder.as_mut(), out_file.deref_mut())?;
|
||||
};
|
||||
}
|
||||
|
||||
// Command-line entry points
|
||||
@@ -335,7 +348,7 @@ pub(crate) fn decompress_cmd(infile: &Utf8CStr, outfile: Option<&Utf8CStr>) -> L
|
||||
FileOrStd::File(outfile.create(OFlag::O_WRONLY | OFlag::O_TRUNC, 0o644)?)
|
||||
};
|
||||
|
||||
let mut decoder = get_decoder(format, Cursor::new(buf).chain(input.as_file()));
|
||||
let mut decoder = get_decoder(format, Cursor::new(buf).chain(input.as_file()))?;
|
||||
std::io::copy(decoder.as_mut(), &mut output.as_file())?;
|
||||
|
||||
if rm_in {
|
||||
@@ -378,7 +391,7 @@ pub(crate) fn compress_cmd(
|
||||
FileOrStd::File(outfile)
|
||||
};
|
||||
|
||||
let mut encoder = get_encoder(method, output.as_file());
|
||||
let mut encoder = get_encoder(method, output.as_file())?;
|
||||
std::io::copy(&mut input.as_file(), encoder.as_mut())?;
|
||||
encoder.finish()?;
|
||||
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
#![allow(clippy::useless_conversion)]
|
||||
|
||||
use argh::FromArgs;
|
||||
use base::argh;
|
||||
use bytemuck::{Pod, Zeroable, from_bytes};
|
||||
use num_traits::cast::AsPrimitive;
|
||||
use size::{Base, Size, Style};
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::{BTreeMap, HashMap};
|
||||
use std::fmt::{Display, Formatter};
|
||||
@@ -9,11 +14,6 @@ use std::mem::size_of;
|
||||
use std::process::exit;
|
||||
use std::str;
|
||||
|
||||
use argh::FromArgs;
|
||||
use bytemuck::{Pod, Zeroable, from_bytes};
|
||||
use num_traits::cast::AsPrimitive;
|
||||
use size::{Base, Size, Style};
|
||||
|
||||
use crate::check_env;
|
||||
use crate::compress::{get_decoder, get_encoder};
|
||||
use crate::ffi::FileFormat;
|
||||
@@ -23,9 +23,10 @@ use base::libc::{
|
||||
S_IWOTH, S_IWUSR, S_IXGRP, S_IXOTH, S_IXUSR, dev_t, gid_t, major, makedev, minor, mknod,
|
||||
mode_t, uid_t,
|
||||
};
|
||||
use base::nix::fcntl::OFlag;
|
||||
use base::{
|
||||
BytesExt, EarlyExitExt, LoggedResult, MappedFile, OptionExt, ResultExt, Utf8CStr, Utf8CStrBuf,
|
||||
WriteExt, cstr, log_err, nix::fcntl::OFlag,
|
||||
WriteExt, cstr, log_err,
|
||||
};
|
||||
|
||||
#[derive(FromArgs)]
|
||||
@@ -136,10 +137,10 @@ struct Add {
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand, name = "ls")]
|
||||
struct List {
|
||||
#[argh(positional, default = r#"String::from("/")"#)]
|
||||
path: String,
|
||||
#[argh(switch, short = 'r')]
|
||||
recursive: bool,
|
||||
#[argh(positional, default = r#"String::from("/")"#)]
|
||||
path: String,
|
||||
}
|
||||
|
||||
pub(crate) fn print_cpio_usage() {
|
||||
@@ -483,10 +484,9 @@ impl Cpio {
|
||||
};
|
||||
for (name, entry) in &self.entries {
|
||||
let p = "/".to_string() + name.as_str();
|
||||
if !p.starts_with(&path) {
|
||||
let Some(p) = p.strip_prefix(&path) else {
|
||||
continue;
|
||||
}
|
||||
let p = p.strip_prefix(&path).unwrap();
|
||||
};
|
||||
if !p.is_empty() && !p.starts_with('/') {
|
||||
continue;
|
||||
}
|
||||
@@ -613,8 +613,11 @@ impl Cpio {
|
||||
o.rm(".backup", true);
|
||||
self.rm(".backup", true);
|
||||
|
||||
let mut lhs = o.entries.into_iter().peekable();
|
||||
let mut rhs = self.entries.iter().peekable();
|
||||
let mut left_iter = o.entries.into_iter();
|
||||
let mut right_iter = self.entries.iter();
|
||||
|
||||
let mut lhs = left_iter.next();
|
||||
let mut rhs = right_iter.next();
|
||||
|
||||
loop {
|
||||
enum Action<'a> {
|
||||
@@ -622,32 +625,38 @@ impl Cpio {
|
||||
Record(&'a String),
|
||||
Noop,
|
||||
}
|
||||
let action = match (lhs.peek(), rhs.peek()) {
|
||||
(Some((l, _)), Some((r, re))) => match l.as_str().cmp(r.as_str()) {
|
||||
|
||||
// Move the iterator forward if needed
|
||||
if lhs.is_none() {
|
||||
lhs = left_iter.next();
|
||||
}
|
||||
if rhs.is_none() {
|
||||
rhs = right_iter.next();
|
||||
}
|
||||
|
||||
let action = match (lhs.take(), rhs.take()) {
|
||||
(Some((ln, le)), Some((rn, re))) => match ln.as_str().cmp(rn.as_str()) {
|
||||
Ordering::Less => {
|
||||
let (l, le) = lhs.next().unwrap();
|
||||
Action::Backup(l, le)
|
||||
// Put rhs back
|
||||
rhs = Some((rn, re));
|
||||
Action::Backup(ln, le)
|
||||
}
|
||||
Ordering::Greater => {
|
||||
// Put lhs back
|
||||
lhs = Some((ln, le));
|
||||
Action::Record(rn)
|
||||
}
|
||||
Ordering::Greater => Action::Record(rhs.next().unwrap().0),
|
||||
Ordering::Equal => {
|
||||
let (l, le) = lhs.next().unwrap();
|
||||
let action = if re.data != le.data {
|
||||
Action::Backup(l, le)
|
||||
if re.data != le.data {
|
||||
Action::Backup(ln, le)
|
||||
} else {
|
||||
Action::Noop
|
||||
};
|
||||
rhs.next();
|
||||
action
|
||||
}
|
||||
}
|
||||
},
|
||||
(Some(_), None) => {
|
||||
let (l, le) = lhs.next().unwrap();
|
||||
Action::Backup(l, le)
|
||||
}
|
||||
(None, Some(_)) => Action::Record(rhs.next().unwrap().0),
|
||||
(None, None) => {
|
||||
break;
|
||||
}
|
||||
(Some((ln, le)), None) => Action::Backup(ln, le),
|
||||
(None, Some((rn, _))) => Action::Record(rn),
|
||||
(None, None) => break,
|
||||
};
|
||||
match action {
|
||||
Action::Backup(name, mut entry) => {
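The rewritten loop is a classic two-iterator merge join done without `Peekable`: take one item from each side, compare, and put the unconsumed one back before refilling. A generic sketch of the same shape (illustrative only; both inputs are assumed to yield keys in sorted order):

```rust
fn merge_sorted(
    mut left: impl Iterator<Item = String>,
    mut right: impl Iterator<Item = String>,
) {
    let (mut l, mut r) = (left.next(), right.next());
    loop {
        match (l.take(), r.take()) {
            (Some(a), Some(b)) => match a.cmp(&b) {
                std::cmp::Ordering::Less => {
                    r = Some(b); // right item not consumed; put it back
                    println!("only left: {a}");
                }
                std::cmp::Ordering::Greater => {
                    l = Some(a); // left item not consumed; put it back
                    println!("only right: {b}");
                }
                std::cmp::Ordering::Equal => println!("in both: {a}"),
            },
            (Some(a), None) => println!("only left: {a}"),
            (None, Some(b)) => println!("only right: {b}"),
            (None, None) => break,
        }
        // Refill whichever slot was consumed this round.
        if l.is_none() {
            l = left.next();
        }
        if r.is_none() {
            r = right.next();
        }
    }
}
```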
@@ -690,8 +699,8 @@ impl CpioEntry {
|
||||
if self.mode & S_IFMT != S_IFREG {
|
||||
return false;
|
||||
}
|
||||
let mut encoder = get_encoder(FileFormat::XZ, Vec::new());
|
||||
let Ok(data): std::io::Result<Vec<u8>> = (try {
|
||||
let mut encoder = get_encoder(FileFormat::XZ, Vec::new())?;
|
||||
encoder.write_all(&self.data)?;
|
||||
encoder.finish()?
|
||||
}) else {
|
||||
@@ -709,7 +718,7 @@ impl CpioEntry {
|
||||
}
|
||||
|
||||
let Ok(data): std::io::Result<Vec<u8>> = (try {
|
||||
let mut decoder = get_decoder(FileFormat::XZ, Cursor::new(&self.data));
|
||||
let mut decoder = get_decoder(FileFormat::XZ, Cursor::new(&self.data))?;
|
||||
let mut data = Vec::new();
|
||||
std::io::copy(decoder.as_mut(), &mut data)?;
|
||||
data
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
use argh::FromArgs;
|
||||
use base::{LoggedResult, MappedFile, Utf8CStr, argh};
|
||||
use fdt::node::{FdtNode, NodeProperty};
|
||||
use fdt::{Fdt, FdtError};
|
||||
use std::cell::UnsafeCell;
|
||||
|
||||
use argh::FromArgs;
|
||||
use fdt::{
|
||||
Fdt, FdtError,
|
||||
node::{FdtNode, NodeProperty},
|
||||
};
|
||||
|
||||
use base::{LoggedResult, MappedFile, Utf8CStr};
|
||||
|
||||
use crate::{check_env, patch::patch_verity};
|
||||
use crate::check_env;
|
||||
use crate::patch::patch_verity;
|
||||
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand)]
|
||||
@@ -21,7 +18,7 @@ pub(crate) enum DtbAction {
|
||||
#[derive(FromArgs)]
|
||||
#[argh(subcommand, name = "print")]
|
||||
pub(crate) struct Print {
|
||||
#[argh(switch, short = 'f')]
|
||||
#[argh(switch, short = 'f', long = none)]
|
||||
fstab: bool,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
#![feature(format_args_nl)]
|
||||
#![feature(btree_extract_if)]
|
||||
#![feature(iter_intersperse)]
|
||||
#![feature(try_blocks)]
|
||||
|
||||
|
||||
@@ -1,15 +1,13 @@
|
||||
use crate::compress::get_decoder;
|
||||
use crate::ffi::check_fmt;
|
||||
use crate::proto::update_metadata::{DeltaArchiveManifest, mod_InstallOperation::Type};
|
||||
use crate::proto::update_metadata::DeltaArchiveManifest;
|
||||
use crate::proto::update_metadata::mod_InstallOperation::Type;
|
||||
use base::{LoggedError, LoggedResult, ReadSeekExt, ResultExt, WriteExt, error};
|
||||
use byteorder::{BigEndian, ReadBytesExt};
|
||||
use quick_protobuf::{BytesReader, MessageRead};
|
||||
use std::io::Cursor;
|
||||
use std::{
|
||||
fs::File,
|
||||
io::{BufReader, Read, Seek, SeekFrom, Write},
|
||||
os::fd::FromRawFd,
|
||||
};
|
||||
use std::fs::File;
|
||||
use std::io::{BufReader, Cursor, Read, Seek, SeekFrom, Write};
|
||||
use std::os::fd::FromRawFd;
|
||||
|
||||
macro_rules! bad_payload {
|
||||
($msg:literal) => {{
|
||||
@@ -166,8 +164,8 @@ pub fn extract_boot_from_payload(
|
||||
out_file.seek(SeekFrom::Start(out_offset))?;
|
||||
let fmt = check_fmt(data);
|
||||
|
||||
let mut decoder = get_decoder(fmt, Cursor::new(data));
|
||||
let Ok(_): std::io::Result<()> = (try {
|
||||
let mut decoder = get_decoder(fmt, Cursor::new(data))?;
|
||||
std::io::copy(decoder.as_mut(), &mut out_file)?;
|
||||
}) else {
|
||||
return Err(bad_payload!("decompression failed"));
|
||||
|
||||
@@ -15,13 +15,15 @@ check-signature = []
|
||||
check-client = []
|
||||
su-check-db = []
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
cxx-gen = { workspace = true }
|
||||
pb-rs = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
base = { path = "../base", features = ["selinux"] }
|
||||
derive = { path = "derive" }
|
||||
base = { workspace = true, features = ["selinux"] }
|
||||
cxx = { workspace = true }
|
||||
num-traits = { workspace = true }
|
||||
num-derive = { workspace = true }
|
||||
@@ -29,6 +31,5 @@ quick-protobuf = { workspace = true }
|
||||
bytemuck = { workspace = true, features = ["derive"] }
|
||||
thiserror = { workspace = true }
|
||||
bit-set = { workspace = true }
|
||||
argh = { workspace = true }
|
||||
nix = { workspace = true, features = ["fs", "mount", "poll", "signal", "term", "user", "zerocopy"] }
|
||||
bitflags = { workspace = true }
|
||||
|
||||
@@ -82,7 +82,7 @@ impl MagiskD {
|
||||
Command::new(&tmp_bb)
|
||||
.arg("--install")
|
||||
.arg("-s")
|
||||
.arg(tmp_bb.parent_dir().unwrap())
|
||||
.arg(tmp_bb.parent_dir().unwrap_or_default())
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.status()
|
||||
@@ -186,13 +186,13 @@ impl MagiskD {
|
||||
setup_preinit_dir();
|
||||
self.ensure_manager();
|
||||
if self.zygisk_enabled.load(Ordering::Relaxed) {
|
||||
self.zygisk.lock().unwrap().reset(true);
|
||||
self.zygisk.lock().reset(true);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn boot_stage_handler(&self, client: UnixStream, code: RequestCode) {
|
||||
// Make sure boot stage execution is always serialized
|
||||
let mut state = self.boot_stage_lock.lock().unwrap();
|
||||
let mut state = self.boot_stage_lock.lock();
|
||||
|
||||
match code {
|
||||
RequestCode::POST_FS_DATA => {
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use pb_rs::{ConfigBuilder, types::FileDescriptor};
|
||||
use pb_rs::ConfigBuilder;
|
||||
use pb_rs::types::FileDescriptor;
|
||||
|
||||
use crate::codegen::gen_cxx_binding;
|
||||
|
||||
#[path = "../include/codegen.rs"]
|
||||
mod codegen;
|
||||
|
||||
#[allow(clippy::unwrap_used)]
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=resetprop/proto/persistent_properties.proto");
|
||||
|
||||
|
||||
@@ -21,20 +21,19 @@ use base::{
|
||||
AtomicArc, BufReadExt, FileAttr, FsPathBuilder, LoggedResult, ReadExt, ResultExt, Utf8CStr,
|
||||
Utf8CStrBuf, WriteExt, cstr, fork_dont_care, info, libc, log_err, set_nice_name,
|
||||
};
|
||||
use nix::{
|
||||
fcntl::OFlag,
|
||||
mount::MsFlags,
|
||||
sys::signal::SigSet,
|
||||
unistd::{dup2_stderr, dup2_stdin, dup2_stdout, getpid, getuid, setsid},
|
||||
};
|
||||
use nix::fcntl::OFlag;
|
||||
use nix::mount::MsFlags;
|
||||
use nix::sys::signal::SigSet;
|
||||
use nix::unistd::{dup2_stderr, dup2_stdin, dup2_stdout, getpid, getuid, setsid};
|
||||
use num_traits::AsPrimitive;
|
||||
use std::fmt::Write as _;
|
||||
use std::io::{BufReader, Write};
|
||||
use std::os::fd::{AsFd, AsRawFd, IntoRawFd, RawFd};
|
||||
use std::os::unix::net::{UCred, UnixListener, UnixStream};
|
||||
use std::process::{Command, exit};
|
||||
use std::sync::OnceLock;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Mutex, OnceLock};
|
||||
use std::sync::nonpoison::Mutex;
|
||||
use std::time::Duration;
|
||||
|
||||
// Global magiskd singleton
|
||||
@@ -107,7 +106,7 @@ impl MagiskD {
|
||||
denylist_handler(-1);
|
||||
|
||||
// Restore native bridge property
|
||||
self.zygisk.lock().unwrap().restore_prop();
|
||||
self.zygisk.lock().restore_prop();
|
||||
|
||||
client.write_pod(&0).log_ok();
|
||||
|
||||
@@ -131,7 +130,7 @@ impl MagiskD {
|
||||
self.prune_su_access();
|
||||
scan_deny_apps();
|
||||
if self.zygisk_enabled.load(Ordering::Relaxed) {
|
||||
self.zygisk.lock().unwrap().reset(false);
|
||||
self.zygisk.lock().reset(false);
|
||||
}
|
||||
}
|
||||
RequestCode::SQLITE_CMD => {
|
||||
|
||||
@@ -187,7 +187,7 @@ unsafe extern "C" fn read_db_row<T: SqlTable>(
|
||||
|
||||
impl MagiskD {
|
||||
fn with_db<F: FnOnce(*mut sqlite3) -> i32>(&self, f: F) -> i32 {
|
||||
let mut db = self.sql_connection.lock().unwrap();
|
||||
let mut db = self.sql_connection.lock();
|
||||
if db.is_none() {
|
||||
let raw_db = open_and_init_db();
|
||||
*db = NonNull::new(raw_db).map(Sqlite3);
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
use proc_macro::TokenStream;
|
||||
|
||||
mod decodable;
|
||||
|
||||
#[proc_macro_derive(Decodable)]
|
||||
pub fn derive_decodable(input: TokenStream) -> TokenStream {
|
||||
decodable::derive_decodable(input)
|
||||
}
|
||||
@@ -1,16 +1,18 @@
|
||||
#![feature(try_blocks)]
|
||||
#![feature(let_chains)]
|
||||
#![feature(fn_traits)]
|
||||
#![feature(unix_socket_ancillary_data)]
|
||||
#![feature(unix_socket_peek)]
|
||||
#![feature(default_field_values)]
|
||||
#![feature(peer_credentials_unix_socket)]
|
||||
#![feature(sync_nonpoison)]
|
||||
#![feature(nonpoison_mutex)]
|
||||
#![feature(nonpoison_condvar)]
|
||||
#![allow(clippy::missing_safety_doc)]
|
||||
|
||||
use crate::ffi::SuRequest;
|
||||
use crate::socket::Encodable;
|
||||
use base::derive::Decodable;
|
||||
use daemon::{MagiskD, connect_daemon_for_cxx};
|
||||
use derive::Decodable;
|
||||
use logging::{android_logging, zygisk_close_logd, zygisk_get_logd, zygisk_logging};
|
||||
use magisk::magisk_main;
|
||||
use mount::revert_unmount;
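The `sync_nonpoison`/`nonpoison_mutex` features enabled above are what let the many `lock().unwrap()` call sites elsewhere in this change become plain `lock()`: the nonpoison mutex returns its guard directly instead of a `Result`. A minimal nightly-only sketch of the difference:

```rust
#![feature(sync_nonpoison, nonpoison_mutex)]

fn bump_poisoning(counter: &std::sync::Mutex<u32>) {
    // The classic mutex can be poisoned, so lock() returns a Result.
    *counter.lock().unwrap() += 1;
}

fn bump_nonpoisoning(counter: &std::sync::nonpoison::Mutex<u32>) {
    // The nonpoison mutex hands the guard back directly.
    *counter.lock() += 1;
}
```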
@@ -1,17 +1,16 @@
|
||||
use crate::consts::{LOG_PIPE, LOGFILE};
|
||||
use crate::ffi::get_magisk_tmp;
|
||||
use crate::logging::LogFile::{Actual, Buffer};
|
||||
use base::const_format::concatcp;
|
||||
use base::{
|
||||
FsPathBuilder, LogLevel, LoggedResult, ReadExt, ResultExt, Utf8CStr, Utf8CStrBuf, WriteExt,
|
||||
const_format::concatcp, cstr, libc, new_daemon_thread, raw_cstr, update_logger,
|
||||
cstr, libc, new_daemon_thread, raw_cstr, update_logger,
|
||||
};
|
||||
use bytemuck::{Pod, Zeroable, bytes_of, write_zeroes};
|
||||
use libc::{PIPE_BUF, c_char, localtime_r, sigtimedwait, time_t, timespec, tm};
|
||||
use nix::{
|
||||
fcntl::OFlag,
|
||||
sys::signal::{SigSet, SigmaskHow, Signal},
|
||||
unistd::{Gid, Uid, chown, getpid, gettid},
|
||||
};
|
||||
use nix::fcntl::OFlag;
|
||||
use nix::sys::signal::{SigSet, SigmaskHow, Signal};
|
||||
use nix::unistd::{Gid, Uid, chown, getpid, gettid};
|
||||
use num_derive::{FromPrimitive, ToPrimitive};
|
||||
use num_traits::FromPrimitive;
|
||||
use std::cmp::min;
|
||||
@@ -21,9 +20,10 @@ use std::io::{IoSlice, Read, Write};
|
||||
use std::mem::ManuallyDrop;
|
||||
use std::os::fd::{FromRawFd, IntoRawFd, RawFd};
|
||||
use std::ptr::null_mut;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicI32, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
use std::sync::nonpoison::Mutex;
|
||||
use std::time::{Duration, SystemTime, UNIX_EPOCH};
|
||||
use std::{fs, io};
|
||||
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
@@ -118,12 +118,12 @@ fn write_log_to_pipe(mut logd: &File, prio: i32, msg: &Utf8CStr) -> io::Result<u
|
||||
static MAGISK_LOGD_FD: Mutex<Option<Arc<File>>> = Mutex::new(None);
|
||||
|
||||
fn with_logd_fd<R, F: FnOnce(&File) -> io::Result<R>>(f: F) {
|
||||
let fd = MAGISK_LOGD_FD.lock().unwrap().clone();
|
||||
let fd = MAGISK_LOGD_FD.lock().clone();
|
||||
if let Some(logd) = fd
|
||||
&& f(&logd).is_err()
|
||||
{
|
||||
// If any error occurs, shut down the logd pipe
|
||||
*MAGISK_LOGD_FD.lock().unwrap() = None;
|
||||
*MAGISK_LOGD_FD.lock() = None;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -266,7 +266,9 @@ fn logfile_write_loop(mut pipe: File) -> io::Result<()> {
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap_or(Duration::ZERO);
|
||||
|
||||
// Note: the obvious better implementation is to use the rust chrono crate, however
|
||||
// the crate cannot fetch the proper local timezone without pulling in a bunch of
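For context, the libc route referred to here boils down to a single `localtime_r` call; a minimal sketch using only the `libc` crate (the helper name is made up):

```rust
// Convert a UNIX timestamp to broken-down local time without chrono.
fn local_tm(secs: libc::time_t) -> libc::tm {
    let mut tm: libc::tm = unsafe { std::mem::zeroed() };
    unsafe {
        libc::localtime_r(&secs, &mut tm);
    }
    tm
}
```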
@@ -323,7 +325,7 @@ pub fn start_log_daemon() {
|
||||
let file = unsafe { File::from_raw_fd(arg as RawFd) };
|
||||
logfile_write_loop(file).ok();
|
||||
// If any error occurs, shut down the logd pipe
|
||||
*MAGISK_LOGD_FD.lock().unwrap() = None;
|
||||
*MAGISK_LOGD_FD.lock() = None;
|
||||
0
|
||||
}
|
||||
|
||||
@@ -332,7 +334,7 @@ pub fn start_log_daemon() {
|
||||
chown(path.as_utf8_cstr(), Some(Uid::from(0)), Some(Gid::from(0)))?;
|
||||
let read = path.open(OFlag::O_RDWR | OFlag::O_CLOEXEC)?;
|
||||
let write = path.open(OFlag::O_WRONLY | OFlag::O_CLOEXEC)?;
|
||||
*MAGISK_LOGD_FD.lock().unwrap() = Some(Arc::new(write));
|
||||
*MAGISK_LOGD_FD.lock() = Some(Arc::new(write));
|
||||
unsafe {
|
||||
new_daemon_thread(logfile_writer_thread, read.into_raw_fd() as usize);
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ use crate::mount::find_preinit_device;
|
||||
use crate::selinux::restorecon;
|
||||
use crate::socket::{Decodable, Encodable};
|
||||
use argh::FromArgs;
|
||||
use base::{CmdArgs, EarlyExitExt, LoggedResult, Utf8CString, clone_attr};
|
||||
use base::{CmdArgs, EarlyExitExt, LoggedResult, Utf8CString, argh, clone_attr};
|
||||
use nix::poll::{PollFd, PollFlags, PollTimeout};
|
||||
use std::ffi::c_char;
|
||||
use std::os::fd::AsFd;
|
||||
|
||||
@@ -8,7 +8,9 @@ use base::{
|
||||
Utf8CStrBuf, Utf8CString, WalkResult, clone_attr, cstr, debug, error, info, libc, raw_cstr,
|
||||
warn,
|
||||
};
|
||||
use nix::{fcntl::OFlag, mount::MsFlags, unistd::UnlinkatFlags};
|
||||
use nix::fcntl::OFlag;
|
||||
use nix::mount::MsFlags;
|
||||
use nix::unistd::UnlinkatFlags;
|
||||
use std::collections::BTreeMap;
|
||||
use std::os::fd::IntoRawFd;
|
||||
use std::path::{Component, Path};
|
||||
@@ -887,7 +889,7 @@ impl MagiskD {
|
||||
|
||||
// Handle zygisk
|
||||
if self.zygisk_enabled.load(Ordering::Acquire) {
|
||||
let mut zygisk = self.zygisk.lock().unwrap();
|
||||
let mut zygisk = self.zygisk.lock();
|
||||
zygisk.set_prop();
|
||||
inject_zygisk_bins(&zygisk.lib_name, &mut system);
|
||||
}
|
||||
|
||||
@@ -6,12 +6,12 @@ use base::{
|
||||
debug, info, libc, parse_mount_info, warn,
|
||||
};
|
||||
use libc::{c_uint, dev_t};
|
||||
use nix::{
|
||||
mount::MsFlags,
|
||||
sys::stat::{Mode, SFlag, mknod},
|
||||
};
|
||||
use nix::mount::MsFlags;
|
||||
use nix::sys::stat::{Mode, SFlag, mknod};
|
||||
use num_traits::AsPrimitive;
|
||||
use std::{cmp::Ordering::Greater, cmp::Ordering::Less, path::Path, path::PathBuf};
|
||||
use std::cmp::Ordering::{Greater, Less};
|
||||
use std::ffi::OsStr;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
pub fn setup_preinit_dir() {
|
||||
let magisk_tmp = get_magisk_tmp();
|
||||
@@ -204,9 +204,8 @@ pub fn find_preinit_device() -> String {
|
||||
}
|
||||
Path::new(&info.source)
|
||||
.file_name()
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.and_then(OsStr::to_str)
|
||||
.unwrap_or_default()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
|
||||
@@ -441,7 +441,7 @@ impl MagiskD {
|
||||
}
|
||||
|
||||
pub fn preserve_stub_apk(&self) {
|
||||
let mut info = self.manager_info.lock().unwrap();
|
||||
let mut info = self.manager_info.lock();
|
||||
|
||||
let apk = cstr::buf::default()
|
||||
.join_path(get_magisk_tmp())
|
||||
@@ -458,19 +458,19 @@ impl MagiskD {
|
||||
}
|
||||
|
||||
pub fn get_manager_uid(&self, user: i32) -> i32 {
|
||||
let mut info = self.manager_info.lock().unwrap();
|
||||
let mut info = self.manager_info.lock();
|
||||
let (uid, _) = info.get_manager(self, user, false);
|
||||
uid
|
||||
}
|
||||
|
||||
pub fn get_manager(&self, user: i32, install: bool) -> (i32, String) {
|
||||
let mut info = self.manager_info.lock().unwrap();
|
||||
let mut info = self.manager_info.lock();
|
||||
let (uid, pkg) = info.get_manager(self, user, install);
|
||||
(uid, pkg.to_string())
|
||||
}
|
||||
|
||||
pub fn ensure_manager(&self) {
|
||||
let mut info = self.manager_info.lock().unwrap();
|
||||
let mut info = self.manager_info.lock();
|
||||
let _ = info.get_manager(self, 0, true);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use super::{
|
||||
PropInfo, PropReader, SYS_PROP,
|
||||
persist::{persist_delete_prop, persist_get_all_props, persist_get_prop, persist_set_prop},
|
||||
use super::persist::{
|
||||
persist_delete_prop, persist_get_all_props, persist_get_prop, persist_set_prop,
|
||||
};
|
||||
use super::{PropInfo, PropReader, SYS_PROP};
|
||||
use argh::{EarlyExit, FromArgs, MissingRequirements};
|
||||
use base::libc::PROP_VALUE_MAX;
|
||||
use base::{
|
||||
BufReadExt, CmdArgs, EarlyExitExt, LogLevel, LoggedResult, ResultExt, Utf8CStr, Utf8CStrBuf,
|
||||
Utf8CString, cstr, debug, log_err, set_log_level_state,
|
||||
Utf8CString, argh, cstr, debug, log_err, set_log_level_state,
|
||||
};
|
||||
use nix::fcntl::OFlag;
|
||||
use std::collections::BTreeMap;
|
||||
@@ -17,21 +17,21 @@ use std::io::BufReader;
|
||||
struct ResetProp {
|
||||
#[argh(switch, short = 'v')]
|
||||
verbose: bool,
|
||||
#[argh(switch, short = 'w')]
|
||||
#[argh(switch, short = 'w', long = none)]
|
||||
wait_mode: bool,
|
||||
#[argh(switch, short = 'p')]
|
||||
#[argh(switch, short = 'p', long = none)]
|
||||
persist: bool,
|
||||
#[argh(switch, short = 'P')]
|
||||
#[argh(switch, short = 'P', long = none)]
|
||||
persist_only: bool,
|
||||
#[argh(switch, short = 'Z')]
|
||||
#[argh(switch, short = 'Z', long = none)]
|
||||
context: bool,
|
||||
#[argh(switch, short = 'n')]
|
||||
#[argh(switch, short = 'n', long = none)]
|
||||
skip_svc: bool,
|
||||
#[argh(option, short = 'f')]
|
||||
file: Option<Utf8CString>,
|
||||
#[argh(option, long = "delete", short = 'd')]
|
||||
#[argh(option, short = 'd', long = "delete")]
|
||||
delete_key: Option<Utf8CString>,
|
||||
#[argh(positional)]
|
||||
#[argh(positional, greedy = true)]
|
||||
args: Vec<Utf8CString>,
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ Wait mode arguments (toggled with -w):
|
||||
|
||||
General flags:
|
||||
-h,--help show this message
|
||||
-v print verbose output to stderr
|
||||
-v,--verbose print verbose output to stderr
|
||||
-w switch to wait mode
|
||||
|
||||
Read mode flags:
|
||||
|
||||
@@ -1,20 +1,17 @@
|
||||
use nix::fcntl::OFlag;
|
||||
use quick_protobuf::{BytesReader, MessageRead, MessageWrite, Writer};
|
||||
use std::io::Read;
|
||||
use std::{
|
||||
fs::File,
|
||||
io::{BufWriter, Write},
|
||||
os::fd::FromRawFd,
|
||||
};
|
||||
use std::fs::File;
|
||||
use std::io::{BufWriter, Read, Write};
|
||||
use std::os::fd::FromRawFd;
|
||||
|
||||
use crate::resetprop::PropReader;
|
||||
use crate::resetprop::proto::persistent_properties::{
|
||||
PersistentProperties, mod_PersistentProperties::PersistentPropertyRecord,
|
||||
};
|
||||
use crate::resetprop::proto::persistent_properties::PersistentProperties;
|
||||
use crate::resetprop::proto::persistent_properties::mod_PersistentProperties::PersistentPropertyRecord;
|
||||
use base::const_format::concatcp;
|
||||
use base::libc::mkstemp;
|
||||
use base::{
|
||||
Directory, FsPathBuilder, LibcReturn, LoggedResult, MappedFile, SilentLogExt, Utf8CStr,
|
||||
Utf8CStrBuf, WalkResult, clone_attr, cstr, debug, libc::mkstemp, log_err,
|
||||
Utf8CStrBuf, WalkResult, clone_attr, cstr, debug, log_err,
|
||||
};
|
||||
|
||||
const PERSIST_PROP_DIR: &str = "/data/property";
|
||||
|
||||
@@ -8,14 +8,14 @@ use ExtraVal::{Bool, Int, IntList, Str};
|
||||
use base::{
|
||||
BytesExt, FileAttr, LibcReturn, LoggedResult, ResultExt, Utf8CStrBuf, cstr, fork_dont_care,
|
||||
};
|
||||
use nix::{
|
||||
fcntl::OFlag,
|
||||
poll::{PollFd, PollFlags, PollTimeout},
|
||||
};
|
||||
use nix::fcntl::OFlag;
|
||||
use nix::poll::{PollFd, PollFlags, PollTimeout};
|
||||
use num_traits::AsPrimitive;
|
||||
use std::fmt::Write;
|
||||
use std::fs::File;
|
||||
use std::os::fd::AsFd;
|
||||
use std::os::unix::net::UCred;
|
||||
use std::{fmt::Write, fs::File, process::Command, process::exit};
|
||||
use std::process::{Command, exit};
|
||||
|
||||
struct Extra<'a> {
|
||||
key: &'static str,
|
||||
@@ -44,7 +44,9 @@ impl Extra<'_> {
|
||||
IntList(list) => {
|
||||
cmd.args(["--es", self.key]);
|
||||
let mut tmp = String::new();
|
||||
list.iter().for_each(|i| write!(&mut tmp, "{i},").unwrap());
|
||||
list.iter().for_each(|i| {
|
||||
write!(&mut tmp, "{i},").ok();
|
||||
});
|
||||
tmp.pop();
|
||||
cmd.arg(&tmp);
|
||||
}
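Swapping `.unwrap()` for `.ok()` here is safe because `write!` into a `String` cannot actually fail; an alternative that also avoids the trailing-comma `pop()` is to collect and join (illustrative only; the element type is assumed):

```rust
fn join_ints(list: &[i32]) -> String {
    list.iter()
        .map(|i| i.to_string())
        .collect::<Vec<_>>()
        .join(",")
}
```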
@@ -67,7 +69,9 @@ impl Extra<'_> {
|
||||
IntList(list) => {
|
||||
tmp = format!("{}:s:", self.key);
|
||||
if !list.is_empty() {
|
||||
list.iter().for_each(|i| write!(&mut tmp, "{i},").unwrap());
|
||||
list.iter().for_each(|i| {
|
||||
write!(&mut tmp, "{i},").ok();
|
||||
});
|
||||
tmp.pop();
|
||||
}
|
||||
}
|
||||
@@ -202,8 +206,11 @@ impl SuAppContext<'_> {
|
||||
let mut pfd = [PollFd::new(fd.as_fd(), PollFlags::POLLIN)];
|
||||
|
||||
// Wait for data input for at most 70 seconds
|
||||
nix::poll::poll(&mut pfd, PollTimeout::try_from(70 * 1000).unwrap())
|
||||
.check_os_err("poll", None, None)?;
|
||||
nix::poll::poll(
|
||||
&mut pfd,
|
||||
PollTimeout::try_from(70 * 1000).unwrap_or(PollTimeout::NONE),
|
||||
)
|
||||
.check_os_err("poll", None, None)?;
|
||||
fd
|
||||
};
|
||||
|
||||
|
||||
@@ -7,11 +7,12 @@ use crate::socket::IpcRead;
|
||||
use base::{LoggedResult, ResultExt, WriteExt, debug, error, exit_on_error, libc, warn};
|
||||
use std::os::fd::IntoRawFd;
|
||||
use std::os::unix::net::{UCred, UnixStream};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
#[allow(unused_imports)]
|
||||
use std::os::fd::AsRawFd;
|
||||
use std::sync::nonpoison::Mutex;
|
||||
|
||||
const DEFAULT_SHELL: &str = "/system/bin/sh";
|
||||
|
||||
@@ -132,7 +133,7 @@ impl MagiskD {
|
||||
|
||||
let info = self.get_su_info(cred.uid as i32);
|
||||
{
|
||||
let mut access = info.access.lock().unwrap();
|
||||
let mut access = info.access.lock();
|
||||
|
||||
// Talk to su manager
|
||||
let mut app = SuAppContext {
|
||||
@@ -203,7 +204,7 @@ impl MagiskD {
|
||||
}
|
||||
|
||||
let cached = self.cached_su_info.load();
|
||||
if cached.uid == uid && cached.access.lock().unwrap().is_fresh() {
|
||||
if cached.uid == uid && cached.access.lock().is_fresh() {
|
||||
return cached;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,11 @@
use base::{FileOrStd, LibcReturn, LoggedResult, OsResult, ResultExt, libc, warn};
use libc::{STDIN_FILENO, TIOCGWINSZ, TIOCSWINSZ, c_int, winsize};
use nix::{
fcntl::{OFlag, SpliceFFlags},
poll::{PollFd, PollFlags, PollTimeout, poll},
sys::signal::{SigSet, Signal, raise},
sys::signalfd::{SfdFlags, SignalFd},
sys::termios::{SetArg, Termios, cfmakeraw, tcgetattr, tcsetattr},
unistd::pipe2,
};
use nix::fcntl::{OFlag, SpliceFFlags};
use nix::poll::{PollFd, PollFlags, PollTimeout, poll};
use nix::sys::signal::{SigSet, Signal, raise};
use nix::sys::signalfd::{SfdFlags, SignalFd};
use nix::sys::termios::{SetArg, Termios, cfmakeraw, tcgetattr, tcsetattr};
use nix::unistd::pipe2;
use std::fs::File;
use std::io::{Read, Write};
use std::mem::MaybeUninit;
@@ -101,7 +99,6 @@ fn pump_tty_impl(ptmx: File, pump_stdin: bool) -> LoggedResult<()> {
let mut signal_fd: Option<SignalFd> = None;

let raw_ptmx = ptmx.as_raw_fd();
let mut raw_sig = -1;

let mut poll_fds = Vec::with_capacity(3);
poll_fds.push(PollFd::new(ptmx.as_fd(), PollFlags::POLLIN));
@@ -113,12 +110,14 @@ fn pump_tty_impl(ptmx: File, pump_stdin: bool) -> LoggedResult<()> {
.check_os_err("pthread_sigmask", None, None)?;
let sig = SignalFd::with_flags(&set, SfdFlags::SFD_CLOEXEC)
.into_os_result("signalfd", None, None)?;
raw_sig = sig.as_raw_fd();
signal_fd = Some(sig);
poll_fds.push(PollFd::new(
signal_fd.as_ref().unwrap().as_fd(),
PollFlags::POLLIN,
));
unsafe {
// SAFETY: signal_fd is always Some
poll_fds.push(PollFd::new(
signal_fd.as_ref().unwrap_unchecked().as_fd(),
PollFlags::POLLIN,
));
}

// We also need to pump stdin to ptmx
poll_fds.push(PollFd::new(
@@ -143,10 +142,12 @@ fn pump_tty_impl(ptmx: File, pump_stdin: bool) -> LoggedResult<()> {
if raw_fd == STDIN_FILENO {
pump_via_splice(FileOrStd::StdIn.as_file(), &ptmx, &pipe_fd)?;
} else if raw_fd == raw_ptmx {
pump_via_splice(&ptmx, FileOrStd::StdIn.as_file(), &pipe_fd)?;
} else if raw_fd == raw_sig {
pump_via_splice(&ptmx, FileOrStd::StdOut.as_file(), &pipe_fd)?;
} else if let Some(sig) = &signal_fd
&& raw_fd == sig.as_raw_fd()
{
sync_winsize(raw_ptmx);
signal_fd.as_ref().unwrap().read_signal()?;
sig.read_signal()?;
}
} else if pfd
.revents()

@@ -15,6 +15,7 @@
#include <sched.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/mount.h>

#include <algorithm>

@@ -1,6 +1,8 @@
use base::{ResultExt, new_daemon_thread};
use nix::{sys::signal::SigSet, unistd::getpid, unistd::gettid};
use std::sync::{Condvar, LazyLock, Mutex, WaitTimeoutResult};
use nix::sys::signal::SigSet;
use nix::unistd::{getpid, gettid};
use std::sync::nonpoison::{Condvar, Mutex};
use std::sync::{LazyLock, WaitTimeoutResult};
use std::time::Duration;

static THREAD_POOL: LazyLock<ThreadPool> = LazyLock::new(ThreadPool::default);
@@ -32,16 +34,16 @@ impl ThreadPool {

let task: Option<Box<dyn FnOnce() + Send>>;
{
let mut info = self.info.lock().unwrap();
let mut info = self.info.lock();
info.idle_threads += 1;
if info.task.is_none() {
if is_core_pool {
// Core pool never closes, wait forever.
info = self.task_is_some.wait(info).unwrap();
info = self.task_is_some.wait(info);
} else {
let dur = Duration::from_secs(THREAD_IDLE_MAX_SEC);
let result: WaitTimeoutResult;
(info, result) = self.task_is_some.wait_timeout(info, dur).unwrap();
(info, result) = self.task_is_some.wait_timeout(info, dur);
if result.timed_out() {
// Terminate thread after timeout
info.idle_threads -= 1;
@@ -71,10 +73,10 @@ impl ThreadPool {
0
}

let mut info = self.info.lock().unwrap();
let mut info = self.info.lock();
while info.task.is_some() {
// Wait until task is none
info = self.task_is_none.wait(info).unwrap();
info = self.task_is_none.wait(info);
}
info.task = Some(Box::new(f));
if info.idle_threads == 0 {

@@ -87,25 +87,23 @@ impl ZygiskState {
}
}

let socket = if let Some(fd) = socket {
fd
if let Some(fd) = socket {
fd.send_fds(&[client.as_raw_fd()])?;
} else {
// Create a new socket pair and fork zygiskd process
let (local, remote) = UnixStream::pair()?;
let (mut local, remote) = UnixStream::pair()?;
if fork_dont_care() == 0 {
exec_zygiskd(is_64_bit, remote);
}
*socket = Some(local);
let local = socket.as_mut().unwrap();
if let Some(module_fds) = daemon.get_module_fds(is_64_bit) {
local.send_fds(&module_fds)?;
}
if local.read_decodable::<i32>()? != 0 {
return log_err!();
}
local
};
socket.send_fds(&[client.as_raw_fd()])?;
local.send_fds(&[client.as_raw_fd()])?;
*socket = Some(local);
}
Ok(())
}

@@ -168,7 +166,6 @@ impl MagiskD {
ZygiskRequest::ConnectCompanion => self
.zygisk
.lock()
.unwrap()
.connect_zygiskd(client, self)
.log_with_msg(|w| w.write_str("zygiskd startup error"))?,
ZygiskRequest::GetModDir => self.get_mod_dir(client)?,
@@ -222,9 +219,12 @@ impl MagiskD {
let failed_ids: Vec<i32> = client.read_decodable()?;
if let Some(module_list) = self.module_list.get() {
for id in failed_ids {
let Some(module) = module_list.get(id as usize) else {
continue;
};
let path = cstr::buf::default()
.join_path(MODULEROOT)
.join_path(&module_list[id as usize].name)
.join_path(&module.name)
.join_path("zygisk");
// Create the unloaded marker file
if let Ok(dir) = Directory::open(&path) {
@@ -240,7 +240,13 @@ impl MagiskD {

fn get_mod_dir(&self, mut client: UnixStream) -> LoggedResult<()> {
let id: i32 = client.read_decodable()?;
let module = &self.module_list.get().unwrap()[id as usize];
let Some(module) = self
.module_list
.get()
.and_then(|list| list.get(id as usize))
else {
return Ok(());
};
let dir = cstr::buf::default()
.join_path(MODULEROOT)
.join_path(&module.name);

@@ -1,6 +1,7 @@
#include <sys/mount.h>
#include <android/dlext.h>
#include <dlfcn.h>
#include <poll.h>

#include <base.hpp>
#include <core.hpp>

@@ -1,238 +1,591 @@
#!/usr/bin/env python3

primitives = ['jint', 'jboolean', 'jlong']
primitives = ["jint", "jboolean", "jlong"]


class JType:
def __init__(self, cpp, jni):
def __init__(self, cpp: str, jni: str):
self.cpp = cpp
self.jni = jni


class JArray(JType):
def __init__(self, type):
def __init__(self, type: JType):
if type.cpp in primitives:
name = type.cpp + 'Array'
name = type.cpp + "Array"
else:
name = 'jobjectArray'
super().__init__(name, '[' + type.jni)
name = "jobjectArray"
super().__init__(name, "[" + type.jni)


class Argument:
def __init__(self, name, type, set_arg = False):
def __init__(self, name: str, type: JType, set_arg=False):
self.name = name
self.type = type
self.set_arg = set_arg

def cpp(self):
return f'{self.type.cpp} {self.name}'
def cpp(self) -> str:
return f"{self.type.cpp} {self.name}"


# Args we don't care, give it an auto generated name
class Anon(Argument):
cnt = 0
def __init__(self, type):
super().__init__(f'_{Anon.cnt}', type)

def __init__(self, type: JType):
super().__init__(f"_{Anon.cnt}", type)
Anon.cnt += 1


class Return:
def __init__(self, value, type):
def __init__(self, value: str, type: JType):
self.value = value
self.type = type

class Method:
def __init__(self, name, ret, args):

class JNIMethod:
def __init__(self, name: str, ret: Return, args: list[Argument]):
self.name = name
self.ret = ret
self.args = args

def cpp(self):
return ', '.join(map(lambda x: x.cpp(), self.args))
def arg_list_name(self) -> str:
return "env, clazz, " + ", ".join(map(lambda x: x.name, self.args))

def name_list(self):
return ', '.join(map(lambda x: x.name, self.args))
def arg_list_cpp(self) -> str:
return "JNIEnv *env, jclass clazz, " + ", ".join(
map(lambda x: x.cpp(), self.args)
)

def jni(self):
args = ''.join(map(lambda x: x.type.jni, self.args))
return f'({args}){self.ret.type.jni}'
def cpp_fn_type(self) -> str:
return f"{self.ret.type.cpp}(*)({self.arg_list_cpp()}"

def body(self, name, i):
return ''
def cpp_lambda_sig(self) -> str:
return f"[] [[clang::no_stack_protector]] ({self.arg_list_cpp()}) static -> {self.ret.type.cpp}"

class JNIHook(Method):
def __init__(self, ver, ret, args):
name = f'{self.base_name()}_{ver}'
def jni_sig(self):
args = "".join(map(lambda x: x.type.jni, self.args))
return f"({args}){self.ret.type.jni}"


class JNIHook(JNIMethod):
def __init__(self, ver: str, ret: Return, args: list[Argument]):
name = f"{self.hook_target()}_{ver}"
super().__init__(name, ret, args)

def base_name(self):
return ''
def hook_target(self):
return ""

def body(self, orig_fn_ptr: str):
return ""

def orig_method(self, name, i):
return f'reinterpret_cast<{self.ret.type.cpp}(*)(JNIEnv *env, jclass clazz, {self.cpp()})>(g_hook->{name}_methods[{i}].fnPtr)'

def ind(i):
return '\n' + ' ' * i
return "\n" + " " * i


# Common types
jint = JType('jint', 'I')
jint = JType("jint", "I")
jintArray = JArray(jint)
jstring = JType('jstring', 'Ljava/lang/String;')
jboolean = JType('jboolean', 'Z')
jlong = JType('jlong', 'J')
void = JType('void', 'V')
jstring = JType("jstring", "Ljava/lang/String;")
jboolean = JType("jboolean", "Z")
jlong = JType("jlong", "J")
void = JType("void", "V")

class ForkAndSpec(JNIHook):

class ForkApp(JNIHook):
def __init__(self, ver, args):
super().__init__(ver, Return('ctx.pid', jint), args)
super().__init__(ver, Return("ctx.pid", jint), args)

def base_name(self):
return 'nativeForkAndSpecialize'
def hook_target(self):
return "nativeForkAndSpecialize"

def init_args(self):
return 'AppSpecializeArgs_v5 args(uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, instruction_set, app_data_dir);'
return "AppSpecializeArgs_v5 args(uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, instruction_set, app_data_dir);"

def body(self, name, i):
decl = ''
def body(self, orig_fn_ptr: str):
decl = ""
decl += ind(3) + self.init_args()
for a in self.args:
if a.set_arg:
decl += ind(3) + f'args.{a.name} = &{a.name};'
decl += ind(3) + 'ZygiskContext ctx(env, &args);'
decl += ind(3) + f'ctx.{self.base_name()}_pre();'
decl += ind(3) + self.orig_method(name, i) + '('
decl += ind(4) + f'env, clazz, {self.name_list()}'
decl += ind(3) + ');'
decl += ind(3) + f'ctx.{self.base_name()}_post();'
decl += ind(3) + f"args.{a.name} = &{a.name};"
decl += ind(3) + "ZygiskContext ctx(env, &args);"
decl += ind(3) + f"ctx.{self.hook_target()}_pre();"
decl += ind(3) + f"reinterpret_cast<{self.cpp_fn_type()})>({orig_fn_ptr})("
decl += ind(4) + self.arg_list_name()
decl += ind(3) + ");"
decl += ind(3) + f"ctx.{self.hook_target()}_post();"
if self.ret.value:
decl += ind(3) + f"return {self.ret.value};"
return decl

class SpecApp(ForkAndSpec):
def __init__(self, ver, args):

class SpecializeApp(ForkApp):
def __init__(self, ver: str, args: list[Argument]):
super().__init__(ver, args)
self.ret = Return('', void)
self.ret = Return("", void)

def base_name(self):
return 'nativeSpecializeAppProcess'
def hook_target(self):
return "nativeSpecializeAppProcess"

class ForkServer(ForkAndSpec):
def base_name(self):
return 'nativeForkSystemServer'

class ForkServer(ForkApp):
def hook_target(self):
return "nativeForkSystemServer"

def init_args(self):
return 'ServerSpecializeArgs_v1 args(uid, gid, gids, runtime_flags, permitted_capabilities, effective_capabilities);'
return "ServerSpecializeArgs_v1 args(uid, gid, gids, runtime_flags, permitted_capabilities, effective_capabilities);"


# Common args
uid = Argument('uid', jint)
gid = Argument('gid', jint)
gids = Argument('gids', jintArray)
runtime_flags = Argument('runtime_flags', jint)
rlimits = Argument('rlimits', JArray(jintArray))
mount_external = Argument('mount_external', jint)
se_info = Argument('se_info', jstring)
nice_name = Argument('nice_name', jstring)
fds_to_close = Argument('fds_to_close', jintArray)
instruction_set = Argument('instruction_set', jstring)
app_data_dir = Argument('app_data_dir', jstring)
uid = Argument("uid", jint)
gid = Argument("gid", jint)
gids = Argument("gids", jintArray)
runtime_flags = Argument("runtime_flags", jint)
rlimits = Argument("rlimits", JArray(jintArray))
mount_external = Argument("mount_external", jint)
se_info = Argument("se_info", jstring)
nice_name = Argument("nice_name", jstring)
fds_to_close = Argument("fds_to_close", jintArray)
instruction_set = Argument("instruction_set", jstring)
app_data_dir = Argument("app_data_dir", jstring)

# o
fds_to_ignore = Argument('fds_to_ignore', jintArray, True)
fds_to_ignore = Argument("fds_to_ignore", jintArray, True)

# p
is_child_zygote = Argument('is_child_zygote', jboolean, True)
is_child_zygote = Argument("is_child_zygote", jboolean, True)

# q_alt
is_top_app = Argument('is_top_app', jboolean, True)
is_top_app = Argument("is_top_app", jboolean, True)

# r
pkg_data_info_list = Argument('pkg_data_info_list', JArray(jstring), True)
whitelisted_data_info_list = Argument('whitelisted_data_info_list', JArray(jstring), True)
mount_data_dirs = Argument('mount_data_dirs', jboolean, True)
mount_storage_dirs = Argument('mount_storage_dirs', jboolean, True)
pkg_data_info_list = Argument("pkg_data_info_list", JArray(jstring), True)
whitelisted_data_info_list = Argument(
"whitelisted_data_info_list", JArray(jstring), True
)
mount_data_dirs = Argument("mount_data_dirs", jboolean, True)
mount_storage_dirs = Argument("mount_storage_dirs", jboolean, True)

# u
mount_sysprop_overrides = Argument('mount_sysprop_overrides', jboolean, True)
mount_sysprop_overrides = Argument("mount_sysprop_overrides", jboolean, True)

# b
use_fifo_ui = Argument("use_fifo_ui", jboolean, False)

# server
permitted_capabilities = Argument('permitted_capabilities', jlong)
effective_capabilities = Argument('effective_capabilities', jlong)
permitted_capabilities = Argument("permitted_capabilities", jlong)
effective_capabilities = Argument("effective_capabilities", jlong)

# Method definitions
fas_l = ForkAndSpec('l', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, nice_name, fds_to_close, instruction_set, app_data_dir])
fas_l = ForkApp(
"l",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
fds_to_close,
instruction_set,
app_data_dir,
],
)

fas_o = ForkAndSpec('o', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, nice_name, fds_to_close, fds_to_ignore, instruction_set, app_data_dir])
fas_o = ForkApp(
"o",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
fds_to_close,
fds_to_ignore,
instruction_set,
app_data_dir,
],
)

fas_p = ForkAndSpec('p', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir])
fas_p = ForkApp(
"p",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
fds_to_close,
fds_to_ignore,
is_child_zygote,
instruction_set,
app_data_dir,
],
)

fas_q_alt = ForkAndSpec('q_alt', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app])
fas_q_alt = ForkApp(
"q_alt",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
fds_to_close,
fds_to_ignore,
is_child_zygote,
instruction_set,
app_data_dir,
is_top_app,
],
)

fas_r = ForkAndSpec('r', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app,
pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs])
fas_r = ForkApp(
"r",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
fds_to_close,
fds_to_ignore,
is_child_zygote,
instruction_set,
app_data_dir,
is_top_app,
pkg_data_info_list,
whitelisted_data_info_list,
mount_data_dirs,
mount_storage_dirs,
],
)

fas_u = ForkAndSpec('u', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app,
pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs, mount_sysprop_overrides])
fas_u = ForkApp(
"u",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
fds_to_close,
fds_to_ignore,
is_child_zygote,
instruction_set,
app_data_dir,
is_top_app,
pkg_data_info_list,
whitelisted_data_info_list,
mount_data_dirs,
mount_storage_dirs,
mount_sysprop_overrides,
],
)

fas_samsung_m = ForkAndSpec('samsung_m', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, fds_to_close, instruction_set, app_data_dir])
fas_b = ForkApp(
"b",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
fds_to_close,
fds_to_ignore,
is_child_zygote,
instruction_set,
app_data_dir,
is_top_app,
use_fifo_ui,
pkg_data_info_list,
whitelisted_data_info_list,
mount_data_dirs,
mount_storage_dirs,
mount_sysprop_overrides,
],
)

fas_samsung_n = ForkAndSpec('samsung_n', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, fds_to_close, instruction_set, app_data_dir, Anon(jint)])
fas_samsung_m = ForkApp(
"samsung_m",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
Anon(jint),
Anon(jint),
nice_name,
fds_to_close,
instruction_set,
app_data_dir,
],
)

fas_samsung_o = ForkAndSpec('samsung_o', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, fds_to_close, fds_to_ignore, instruction_set, app_data_dir])
fas_samsung_n = ForkApp(
"samsung_n",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
Anon(jint),
Anon(jint),
nice_name,
fds_to_close,
instruction_set,
app_data_dir,
Anon(jint),
],
)

fas_samsung_p = ForkAndSpec('samsung_p', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, fds_to_close, fds_to_ignore, is_child_zygote,
instruction_set, app_data_dir])
fas_samsung_o = ForkApp(
"samsung_o",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
Anon(jint),
Anon(jint),
nice_name,
fds_to_close,
fds_to_ignore,
instruction_set,
app_data_dir,
],
)

spec_q = SpecApp('q', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, is_child_zygote, instruction_set, app_data_dir])
fas_samsung_p = ForkApp(
"samsung_p",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
Anon(jint),
Anon(jint),
nice_name,
fds_to_close,
fds_to_ignore,
is_child_zygote,
instruction_set,
app_data_dir,
],
)

spec_q_alt = SpecApp('q_alt', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, is_child_zygote, instruction_set, app_data_dir, is_top_app])
spec_q = SpecializeApp(
"q",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
is_child_zygote,
instruction_set,
app_data_dir,
],
)

spec_r = SpecApp('r', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name,
is_child_zygote, instruction_set, app_data_dir, is_top_app, pkg_data_info_list,
whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs])
spec_q_alt = SpecializeApp(
"q_alt",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
is_child_zygote,
instruction_set,
app_data_dir,
is_top_app,
],
)

spec_u = SpecApp('u', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name,
is_child_zygote, instruction_set, app_data_dir, is_top_app, pkg_data_info_list,
whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs, mount_sysprop_overrides])
spec_r = SpecializeApp(
"r",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
is_child_zygote,
instruction_set,
app_data_dir,
is_top_app,
pkg_data_info_list,
whitelisted_data_info_list,
mount_data_dirs,
mount_storage_dirs,
],
)

spec_samsung_q = SpecApp('samsung_q', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, is_child_zygote, instruction_set, app_data_dir])
spec_u = SpecializeApp(
"u",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
nice_name,
is_child_zygote,
instruction_set,
app_data_dir,
is_top_app,
pkg_data_info_list,
whitelisted_data_info_list,
mount_data_dirs,
mount_storage_dirs,
mount_sysprop_overrides,
],
)

server_l = ForkServer('l', [uid, gid, gids, runtime_flags, rlimits,
permitted_capabilities, effective_capabilities])
spec_samsung_q = SpecializeApp(
"samsung_q",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
mount_external,
se_info,
Anon(jint),
Anon(jint),
nice_name,
is_child_zygote,
instruction_set,
app_data_dir,
],
)

server_samsung_q = ForkServer('samsung_q', [uid, gid, gids, runtime_flags, Anon(jint), Anon(jint), rlimits,
permitted_capabilities, effective_capabilities])
server_l = ForkServer(
"l",
[
uid,
gid,
gids,
runtime_flags,
rlimits,
permitted_capabilities,
effective_capabilities,
],
)

def gen_jni_def(name, methods):
decl = ''
decl += ind(0) + f'std::array<JNINativeMethod, {len(methods)}> {name}_methods = {{{{'
server_samsung_q = ForkServer(
"samsung_q",
[
uid,
gid,
gids,
runtime_flags,
Anon(jint),
Anon(jint),
rlimits,
permitted_capabilities,
effective_capabilities,
],
)


def gen_jni_def(field: str, methods: list[JNIHook]):
decl = ""
decl += ind(0) + f"std::array<JNINativeMethod, {len(methods)}> {field} = {{{{"
for i, m in enumerate(methods):
decl += ind(1) + '{'
decl += ind(2) + f'"{m.base_name()}",'
decl += ind(2) + f'"{m.jni()}",'
decl += ind(2) + f'(void *) +[] [[clang::no_stack_protector]] (JNIEnv *env, jclass clazz, {m.cpp()}) static -> {m.ret.type.cpp} {{'
decl += m.body(name, i)
if m.ret.value:
decl += ind(3) + f'return {m.ret.value};'
decl += ind(2) + '}'
decl += ind(1) + '},'
decl += ind(0) + '}};'
decl += ind(1) + f"// {m.name}"
decl += ind(1) + "{"
decl += ind(2) + f'"{m.hook_target()}",'
decl += ind(2) + f'"{m.jni_sig()}",'
decl += ind(2) + f"(void *) +{m.cpp_lambda_sig()} {{"
orig_fn_ptr = f"get_defs()->{field}[{i}].fnPtr"
decl += m.body(orig_fn_ptr)
decl += ind(2) + "}"
decl += ind(1) + "},"
decl += ind(0) + "}};"
decl += ind(0)

return decl

with open('jni_hooks.hpp', 'w') as f:
f.write('// Generated by gen_jni_hooks.py\n')

f.write(gen_jni_def('zygote', [
fas_l, fas_o, fas_p, fas_q_alt, fas_r, fas_u, fas_samsung_m, fas_samsung_n, fas_samsung_o,
fas_samsung_p, spec_q, spec_q_alt, spec_r, spec_u, spec_samsung_q, server_l, server_samsung_q]))
with open("jni_hooks.hpp", "w") as f:
f.write("// Generated by gen_jni_hooks.py\n")
f.write("#pragma once\n\n")
f.write("struct JniHookDefinitions;\n")
f.write("static JniHookDefinitions *get_defs();\n\n")
f.write("struct JniHookDefinitions {\n")
f.write(
gen_jni_def(
"fork_app_methods",
[
fas_l,
fas_o,
fas_p,
fas_q_alt,
fas_r,
fas_u,
fas_b,
fas_samsung_m,
fas_samsung_n,
fas_samsung_o,
fas_samsung_p,
],
)
)

f.write('\n')
f.write(
gen_jni_def(
"specialize_app_methods",
[spec_q, spec_q_alt, spec_r, spec_u, spec_samsung_q],
)
)

f.write(gen_jni_def("fork_server_methods", [server_l, server_samsung_q]))

f.write("\n};\n")

@@ -8,10 +8,10 @@
#include <lsplt.hpp>

#include <base.hpp>
#include <consts.hpp>

#include "zygisk.hpp"
#include "module.hpp"
#include "jni_hooks.hpp"

using namespace std;

@@ -90,25 +90,15 @@ using namespace std;

constexpr const char *kZygoteInit = "com.android.internal.os.ZygoteInit";
constexpr const char *kZygote = "com/android/internal/os/Zygote";

// Global contexts:
//
// HookContext lives as long as Zygisk is loaded in memory. It tracks the process's function
// hooking state and bootstraps code injection until we replace the process specialization methods.
//
// ZygiskContext lives during the process specialization process. It implements Zygisk
// features, such as loading modules and customizing process fork/specialization.

ZygiskContext *g_ctx;
struct HookContext;
static HookContext *g_hook;
constexpr const char *kForkApp = "nativeForkAndSpecialize";
constexpr const char *kSpecializeApp = "nativeSpecializeAppProcess";
constexpr const char *kForkServer = "nativeForkSystemServer";

using JNIMethods = std::span<JNINativeMethod>;
using JNIMethodsDyn = std::pair<unique_ptr<JNINativeMethod[]>, size_t>;

struct HookContext {
#include "jni_hooks.hpp"
struct HookContext : JniHookDefinitions {

// std::array<JNINativeMethod> zygote_methods
vector<tuple<dev_t, ino_t, const char *, void **>> plt_backup;
const NativeBridgeRuntimeCallbacks *runtime_callbacks = nullptr;
void *self_handle = nullptr;
@@ -119,15 +109,34 @@ struct HookContext {
void restore_plt_hook();
void hook_zygote_jni();
void restore_zygote_hook(JNIEnv *env);
void hook_jni_methods(JNIEnv *env, const char *clz, JNIMethods methods);
void hook_jni_methods(JNIEnv *env, const char *clz, JNIMethods methods) const;
void post_native_bridge_load(void *handle);

private:
void register_hook(dev_t dev, ino_t inode, const char *symbol, void *new_func, void **old_func);
int hook_jni_methods(JNIEnv *env, jclass clazz, JNIMethods methods) const;
JNIMethodsDyn get_jni_methods(JNIEnv *env, jclass clazz) const;
};

// -----------------------------------------------------------------

// Global contexts:
//
// HookContext lives as long as Zygisk is loaded in memory. It tracks the process's function
// hooking state and bootstraps code injection until we replace the process specialization methods.
//
// ZygiskContext lives during the process specialization process. It implements Zygisk
// features, such as loading modules and customizing process fork/specialization.

ZygiskContext *g_ctx;
static HookContext *g_hook;

static JniHookDefinitions *get_defs() {
return g_hook;
}

// -----------------------------------------------------------------

#define DCL_HOOK_FUNC(ret, func, ...) \
ret (*old_##func)(__VA_ARGS__); \
ret new_##func(__VA_ARGS__)
@@ -452,56 +461,74 @@ void HookContext::restore_plt_hook() {

// -----------------------------------------------------------------

void HookContext::hook_jni_methods(JNIEnv *env, const char *clz, JNIMethods methods) {
jclass clazz;
if (!runtime_callbacks || !env || !clz || !(clazz = env->FindClass(clz))) {
for (auto &method : methods) {
method.fnPtr = nullptr;
}
return;
}
JNIMethodsDyn HookContext::get_jni_methods(JNIEnv *env, jclass clazz) const {
size_t total = runtime_callbacks->getNativeMethodCount(env, clazz);
auto methods = std::make_unique_for_overwrite<JNINativeMethod[]>(total);
runtime_callbacks->getNativeMethods(env, clazz, methods.get(), total);
return std::make_pair(std::move(methods), total);
}

// Backup existing methods
auto total = runtime_callbacks->getNativeMethodCount(env, clazz);
auto old_methods = std::make_unique_for_overwrite<JNINativeMethod[]>(total);
runtime_callbacks->getNativeMethods(env, clazz, old_methods.get(), total);

// WARNING: the signature field returned from getNativeMethods is in a non-standard format.
// DO NOT TRY TO USE IT. This is the reason why we try to call RegisterNatives on every single
// provided JNI methods directly to be 100% sure about whether a signature matches or not.

// Replace methods
static void register_jni_methods(JNIEnv *env, jclass clazz, JNIMethods methods) {
for (auto &method : methods) {
// It's useful to allow nullptr function pointer for restoring hook
if (!method.fnPtr) continue;

// It's normal that the method is not found
if (env->RegisterNatives(clazz, &method, 1) == JNI_ERR || env->ExceptionCheck() == JNI_TRUE) {
if (auto exception = env->ExceptionOccurred()) {
env->DeleteLocalRef(exception);
}
env->ExceptionClear();
method.fnPtr = nullptr;
}
}
}

int HookContext::hook_jni_methods(JNIEnv *env, jclass clazz, JNIMethods methods) const {
// Backup existing methods
auto o = get_jni_methods(env, clazz);
const auto old_methods = span(o.first.get(), o.second);

// WARNING: the signature field returned from getNativeMethods is in a non-standard format.
// DO NOT TRY TO USE IT. This is the reason why we try to call RegisterNatives on every single
// provided JNI methods directly to be 100% sure about whether a signature matches or not.

// Replace methods
register_jni_methods(env, clazz, methods);

// Fetch the new set of native methods
auto new_methods = std::make_unique_for_overwrite<JNINativeMethod[]>(total);
runtime_callbacks->getNativeMethods(env, clazz, new_methods.get(), total);
auto n = get_jni_methods(env, clazz);
const auto new_methods = span(n.first.get(), n.second);

// Find the old function pointer and return to caller
int hook_count = 0;
for (auto &method : methods) {
if (!method.fnPtr) continue;
for (auto i = 0; i < total; ++i) {
auto &new_method = new_methods[i];
for (const auto &new_method : new_methods) {
if (new_method.fnPtr == method.fnPtr) {
auto &old_method = old_methods[i];
ZLOGV("replace %s#%s%s %p -> %p\n", clz, method.name, method.signature, old_method.fnPtr, method.fnPtr);
method.fnPtr = old_method.fnPtr;
break;
for (const auto &old_method : old_methods) {
if (strcmp(old_method.name, new_method.name) == 0 &&
strcmp(old_method.signature, new_method.signature) == 0) {
ZLOGV("replace %s %s %p -> %p\n",
method.name, method.signature, old_method.fnPtr, method.fnPtr);
method.fnPtr = old_method.fnPtr;
++hook_count;
// Break 2 levels of for loop
goto next_method;
}
}
}
}
next_method:
}
return hook_count;
}

void HookContext::hook_jni_methods(JNIEnv *env, const char *clz, JNIMethods methods) const {
jclass clazz;
if (!runtime_callbacks || !env || !clz || !((clazz = env->FindClass(clz)))) {
ranges::for_each(methods, [](auto &m) { m.fnPtr = nullptr; });
return;
}
hook_jni_methods(env, clazz, methods);
}

void HookContext::hook_zygote_jni() {
@@ -538,11 +565,54 @@ void HookContext::hook_zygote_jni() {
if (res != JNI_OK || env == nullptr) {
ZLOGW("JNIEnv not found\n");
}
hook_jni_methods(env, kZygote, zygote_methods);

JNINativeMethod missing_method{};
bool replaced_fork_app = false;
bool replaced_specialize_app = false;
bool replaced_fork_server = false;

jclass clazz = env->FindClass(kZygote);
auto [ptr, count] = get_jni_methods(env, clazz);
for (const auto methods = span(ptr.get(), count); const auto &method : methods) {
if (strcmp(method.name, kForkApp) == 0) {
if (hook_jni_methods(env, clazz, fork_app_methods) == 0) {
missing_method = method;
break;
}
replaced_fork_app = true;
} else if (strcmp(method.name, kSpecializeApp) == 0) {
if (hook_jni_methods(env, clazz, specialize_app_methods) == 0) {
missing_method = method;
break;
}
replaced_specialize_app = true;
} else if (strcmp(method.name, kForkServer) == 0) {
if (hook_jni_methods(env, clazz, fork_server_methods) == 0) {
missing_method = method;
break;
}
replaced_fork_server = true;
}
}

if (missing_method.name != nullptr) {
ZLOGE("Cannot hook method: %s %s\n", missing_method.name, missing_method.signature);
// Restore methods that were already replaced
if (replaced_fork_app) register_jni_methods(env, clazz, fork_app_methods);
if (replaced_specialize_app) register_jni_methods(env, clazz, specialize_app_methods);
if (replaced_fork_server) register_jni_methods(env, clazz, fork_server_methods);
// Clear the method lists just in case
ranges::for_each(fork_app_methods, [](auto &m) { m.fnPtr = nullptr; });
ranges::for_each(specialize_app_methods, [](auto &m) { m.fnPtr = nullptr; });
ranges::for_each(fork_server_methods, [](auto &m) { m.fnPtr = nullptr; });
}
}

void HookContext::restore_zygote_hook(JNIEnv *env) {
hook_jni_methods(env, kZygote, zygote_methods);
jclass clazz = env->FindClass(kZygote);
register_jni_methods(env, clazz, fork_app_methods);
register_jni_methods(env, clazz, specialize_app_methods);
register_jni_methods(env, clazz, fork_server_methods);
}

// -----------------------------------------------------------------
@@ -553,5 +623,5 @@ void hook_entry() {
}

void hookJniNativeMethods(JNIEnv *env, const char *clz, JNINativeMethod *methods, int numMethods) {
g_hook->hook_jni_methods(env, clz, { methods, (size_t) numMethods });
g_hook->hook_jni_methods(env, clz, { methods, static_cast<size_t>(numMethods) });
}
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
// Generated by gen_jni_hooks.py
|
||||
#pragma once
|
||||
|
||||
std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
struct JniHookDefinitions;
|
||||
static JniHookDefinitions *get_defs();
|
||||
|
||||
struct JniHookDefinitions {
|
||||
|
||||
std::array<JNINativeMethod, 11> fork_app_methods = {{
|
||||
// nativeForkAndSpecialize_l
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;Ljava/lang/String;[ILjava/lang/String;Ljava/lang/String;)I",
|
||||
@@ -8,13 +15,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
AppSpecializeArgs_v5 args(uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, instruction_set, app_data_dir);
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jstring instruction_set, jstring app_data_dir)>(g_hook->zygote_methods[0].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jstring instruction_set, jstring app_data_dir)>(get_defs()->fork_app_methods[0].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, fds_to_close, instruction_set, app_data_dir
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_o
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;Ljava/lang/String;[I[ILjava/lang/String;Ljava/lang/String;)I",
|
||||
@@ -23,13 +31,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
args.fds_to_ignore = &fds_to_ignore;
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jstring instruction_set, jstring app_data_dir)>(g_hook->zygote_methods[1].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jstring instruction_set, jstring app_data_dir)>(get_defs()->fork_app_methods[1].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, fds_to_close, fds_to_ignore, instruction_set, app_data_dir
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_p
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;Ljava/lang/String;[I[IZLjava/lang/String;Ljava/lang/String;)I",
|
||||
@@ -39,13 +48,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
args.is_child_zygote = &is_child_zygote;
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir)>(g_hook->zygote_methods[2].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir)>(get_defs()->fork_app_methods[2].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_q_alt
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;Ljava/lang/String;[I[IZLjava/lang/String;Ljava/lang/String;Z)I",
|
||||
@@ -56,13 +66,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
args.is_top_app = &is_top_app;
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app)>(g_hook->zygote_methods[3].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app)>(get_defs()->fork_app_methods[3].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_r
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;Ljava/lang/String;[I[IZLjava/lang/String;Ljava/lang/String;Z[Ljava/lang/String;[Ljava/lang/String;ZZ)I",
|
||||
@@ -77,13 +88,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
args.mount_storage_dirs = &mount_storage_dirs;
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs)>(g_hook->zygote_methods[4].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs)>(get_defs()->fork_app_methods[4].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app, pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_u
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;Ljava/lang/String;[I[IZLjava/lang/String;Ljava/lang/String;Z[Ljava/lang/String;[Ljava/lang/String;ZZZ)I",
|
||||
@@ -99,13 +111,37 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
args.mount_sysprop_overrides = &mount_sysprop_overrides;
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs, jboolean mount_sysprop_overrides)>(g_hook->zygote_methods[5].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs, jboolean mount_sysprop_overrides)>(get_defs()->fork_app_methods[5].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app, pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs, mount_sysprop_overrides
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_b
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;Ljava/lang/String;[I[IZLjava/lang/String;Ljava/lang/String;ZZ[Ljava/lang/String;[Ljava/lang/String;ZZZ)I",
|
||||
(void *) +[] [[clang::no_stack_protector]] (JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jboolean use_fifo_ui, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs, jboolean mount_sysprop_overrides) static -> jint {
|
||||
AppSpecializeArgs_v5 args(uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, instruction_set, app_data_dir);
|
||||
args.fds_to_ignore = &fds_to_ignore;
|
||||
args.is_child_zygote = &is_child_zygote;
|
||||
args.is_top_app = &is_top_app;
|
||||
args.pkg_data_info_list = &pkg_data_info_list;
|
||||
args.whitelisted_data_info_list = &whitelisted_data_info_list;
|
||||
args.mount_data_dirs = &mount_data_dirs;
|
||||
args.mount_storage_dirs = &mount_storage_dirs;
|
||||
args.mount_sysprop_overrides = &mount_sysprop_overrides;
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jboolean use_fifo_ui, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs, jboolean mount_sysprop_overrides)>(get_defs()->fork_app_methods[6].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app, use_fifo_ui, pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs, mount_sysprop_overrides
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_samsung_m
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;IILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;)I",
|
||||
@@ -113,13 +149,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
AppSpecializeArgs_v5 args(uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, instruction_set, app_data_dir);
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _0, jint _1, jstring nice_name, jintArray fds_to_close, jstring instruction_set, jstring app_data_dir)>(g_hook->zygote_methods[6].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _0, jint _1, jstring nice_name, jintArray fds_to_close, jstring instruction_set, jstring app_data_dir)>(get_defs()->fork_app_methods[7].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, _0, _1, nice_name, fds_to_close, instruction_set, app_data_dir
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_samsung_n
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;IILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;I)I",
|
||||
@@ -127,13 +164,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
AppSpecializeArgs_v5 args(uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, instruction_set, app_data_dir);
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _2, jint _3, jstring nice_name, jintArray fds_to_close, jstring instruction_set, jstring app_data_dir, jint _4)>(g_hook->zygote_methods[7].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _2, jint _3, jstring nice_name, jintArray fds_to_close, jstring instruction_set, jstring app_data_dir, jint _4)>(get_defs()->fork_app_methods[8].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, _2, _3, nice_name, fds_to_close, instruction_set, app_data_dir, _4
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_samsung_o
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;IILjava/lang/String;[I[ILjava/lang/String;Ljava/lang/String;)I",
|
||||
@@ -142,13 +180,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
args.fds_to_ignore = &fds_to_ignore;
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _5, jint _6, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jstring instruction_set, jstring app_data_dir)>(g_hook->zygote_methods[8].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _5, jint _6, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jstring instruction_set, jstring app_data_dir)>(get_defs()->fork_app_methods[9].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, _5, _6, nice_name, fds_to_close, fds_to_ignore, instruction_set, app_data_dir
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
// nativeForkAndSpecialize_samsung_p
|
||||
{
|
||||
"nativeForkAndSpecialize",
|
||||
"(II[II[[IILjava/lang/String;IILjava/lang/String;[I[IZLjava/lang/String;Ljava/lang/String;)I",
|
||||
@@ -158,13 +197,17 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
|
||||
args.is_child_zygote = &is_child_zygote;
|
||||
ZygiskContext ctx(env, &args);
|
||||
ctx.nativeForkAndSpecialize_pre();
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _7, jint _8, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir)>(g_hook->zygote_methods[9].fnPtr)(
|
||||
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _7, jint _8, jstring nice_name, jintArray fds_to_close, jintArray fds_to_ignore, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir)>(get_defs()->fork_app_methods[10].fnPtr)(
|
||||
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, _7, _8, nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir
|
||||
);
|
||||
ctx.nativeForkAndSpecialize_post();
|
||||
return ctx.pid;
|
||||
}
|
||||
},
|
||||
}};
|
||||
|
||||
std::array<JNINativeMethod, 5> specialize_app_methods = {{
// nativeSpecializeAppProcess_q
{
"nativeSpecializeAppProcess",
"(II[II[[IILjava/lang/String;Ljava/lang/String;ZLjava/lang/String;Ljava/lang/String;)V",
@@ -173,12 +216,13 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
args.is_child_zygote = &is_child_zygote;
ZygiskContext ctx(env, &args);
ctx.nativeSpecializeAppProcess_pre();
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir)>(g_hook->zygote_methods[10].fnPtr)(
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir)>(get_defs()->specialize_app_methods[0].fnPtr)(
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, is_child_zygote, instruction_set, app_data_dir
);
ctx.nativeSpecializeAppProcess_post();
}
},
// nativeSpecializeAppProcess_q_alt
{
"nativeSpecializeAppProcess",
"(II[II[[IILjava/lang/String;Ljava/lang/String;ZLjava/lang/String;Ljava/lang/String;Z)V",
@@ -188,12 +232,13 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
args.is_top_app = &is_top_app;
ZygiskContext ctx(env, &args);
ctx.nativeSpecializeAppProcess_pre();
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app)>(g_hook->zygote_methods[11].fnPtr)(
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app)>(get_defs()->specialize_app_methods[1].fnPtr)(
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, is_child_zygote, instruction_set, app_data_dir, is_top_app
);
ctx.nativeSpecializeAppProcess_post();
}
},
// nativeSpecializeAppProcess_r
{
"nativeSpecializeAppProcess",
"(II[II[[IILjava/lang/String;Ljava/lang/String;ZLjava/lang/String;Ljava/lang/String;Z[Ljava/lang/String;[Ljava/lang/String;ZZ)V",
@@ -207,12 +252,13 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
args.mount_storage_dirs = &mount_storage_dirs;
ZygiskContext ctx(env, &args);
ctx.nativeSpecializeAppProcess_pre();
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs)>(g_hook->zygote_methods[12].fnPtr)(
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs)>(get_defs()->specialize_app_methods[2].fnPtr)(
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, is_child_zygote, instruction_set, app_data_dir, is_top_app, pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs
);
ctx.nativeSpecializeAppProcess_post();
}
},
// nativeSpecializeAppProcess_u
{
"nativeSpecializeAppProcess",
"(II[II[[IILjava/lang/String;Ljava/lang/String;ZLjava/lang/String;Ljava/lang/String;Z[Ljava/lang/String;[Ljava/lang/String;ZZZ)V",
@@ -227,12 +273,13 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
args.mount_sysprop_overrides = &mount_sysprop_overrides;
ZygiskContext ctx(env, &args);
ctx.nativeSpecializeAppProcess_pre();
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs, jboolean mount_sysprop_overrides)>(g_hook->zygote_methods[13].fnPtr)(
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir, jboolean is_top_app, jobjectArray pkg_data_info_list, jobjectArray whitelisted_data_info_list, jboolean mount_data_dirs, jboolean mount_storage_dirs, jboolean mount_sysprop_overrides)>(get_defs()->specialize_app_methods[3].fnPtr)(
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name, is_child_zygote, instruction_set, app_data_dir, is_top_app, pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs, mount_sysprop_overrides
);
ctx.nativeSpecializeAppProcess_post();
}
},
// nativeSpecializeAppProcess_samsung_q
{
"nativeSpecializeAppProcess",
"(II[II[[IILjava/lang/String;IILjava/lang/String;ZLjava/lang/String;Ljava/lang/String;)V",
@@ -241,12 +288,16 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
args.is_child_zygote = &is_child_zygote;
ZygiskContext ctx(env, &args);
ctx.nativeSpecializeAppProcess_pre();
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _9, jint _10, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir)>(g_hook->zygote_methods[14].fnPtr)(
reinterpret_cast<void(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jint mount_external, jstring se_info, jint _9, jint _10, jstring nice_name, jboolean is_child_zygote, jstring instruction_set, jstring app_data_dir)>(get_defs()->specialize_app_methods[4].fnPtr)(
env, clazz, uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, _9, _10, nice_name, is_child_zygote, instruction_set, app_data_dir
);
ctx.nativeSpecializeAppProcess_post();
}
},
}};

std::array<JNINativeMethod, 2> fork_server_methods = {{
// nativeForkSystemServer_l
{
"nativeForkSystemServer",
"(II[II[[IJJ)I",
@@ -254,13 +305,14 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
ServerSpecializeArgs_v1 args(uid, gid, gids, runtime_flags, permitted_capabilities, effective_capabilities);
ZygiskContext ctx(env, &args);
ctx.nativeForkSystemServer_pre();
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jlong permitted_capabilities, jlong effective_capabilities)>(g_hook->zygote_methods[15].fnPtr)(
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jobjectArray rlimits, jlong permitted_capabilities, jlong effective_capabilities)>(get_defs()->fork_server_methods[0].fnPtr)(
env, clazz, uid, gid, gids, runtime_flags, rlimits, permitted_capabilities, effective_capabilities
);
ctx.nativeForkSystemServer_post();
return ctx.pid;
}
},
// nativeForkSystemServer_samsung_q
{
"nativeForkSystemServer",
"(II[IIII[[IJJ)I",
@@ -268,7 +320,7 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
ServerSpecializeArgs_v1 args(uid, gid, gids, runtime_flags, permitted_capabilities, effective_capabilities);
ZygiskContext ctx(env, &args);
ctx.nativeForkSystemServer_pre();
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jint _11, jint _12, jobjectArray rlimits, jlong permitted_capabilities, jlong effective_capabilities)>(g_hook->zygote_methods[16].fnPtr)(
reinterpret_cast<jint(*)(JNIEnv *env, jclass clazz, jint uid, jint gid, jintArray gids, jint runtime_flags, jint _11, jint _12, jobjectArray rlimits, jlong permitted_capabilities, jlong effective_capabilities)>(get_defs()->fork_server_methods[1].fnPtr)(
env, clazz, uid, gid, gids, runtime_flags, _11, _12, rlimits, permitted_capabilities, effective_capabilities
);
ctx.nativeForkSystemServer_post();
@@ -277,3 +329,4 @@ std::array<JNINativeMethod, 17> zygote_methods = {{
},
}};

};

@@ -208,6 +208,12 @@ bool ZygiskContext::plt_hook_commit() {
{
mutex_guard lock(hook_info_lock);
plt_hook_process_regex();
for (auto& reg: register_info) {
regfree(&reg.regex);
}
for (auto& ign: ignore_info) {
regfree(&ign.regex);
}
register_info.clear();
ignore_info.clear();
}

@@ -19,7 +19,7 @@
#endif

// Extreme verbose logging
//#define ZLOGV(...) ZLOGD(__VA_ARGS__)
// #define ZLOGV(...) ZLOGD(__VA_ARGS__)
#define ZLOGV(...) (void*)0

void hook_entry();

Submodule native/src/external/cxx-rs updated: 667377297a...b09b91554b
Submodule native/src/external/system_properties updated: f081b497cd...b7c2088565
@@ -7,11 +7,14 @@ edition.workspace = true
crate-type = ["staticlib"]
path = "lib.rs"

[lints]
workspace = true

[build-dependencies]
cxx-gen = { workspace = true }

[dependencies]
base = { path = "../base" }
magiskpolicy = { path = "../sepolicy", default-features = false }
base = { workspace = true }
magiskpolicy = { workspace = true, features = ["no-main"] }
cxx = { workspace = true }
num-traits = { workspace = true }

@@ -12,6 +12,8 @@
#include <base.hpp>
#include <sepolicy.hpp>

using kv_pairs = std::vector<std::pair<std::string, std::string>>;

#include "init-rs.hpp"

int magisk_proxy_main(int, char *argv[]);

@@ -1,19 +1,11 @@
use crate::ffi::backup_init;
use crate::ffi::{BootConfig, MagiskInit, backup_init, magisk_proxy_main};
use crate::logging::setup_klog;
use crate::mount::is_rootfs;
use crate::twostage::hexpatch_init_for_second_stage;
use crate::{
ffi::{BootConfig, MagiskInit, magisk_proxy_main},
logging::setup_klog,
};
use base::{
LibcReturn, LoggedResult, ResultExt, cstr, info,
libc::{basename, getpid, mount, umask},
raw_cstr,
};
use std::{
ffi::{CStr, c_char},
ptr::null,
};
use base::libc::{basename, getpid, mount, umask};
use base::{LibcReturn, LoggedResult, ResultExt, cstr, info, raw_cstr};
use std::ffi::{CStr, c_char};
use std::ptr::null;

impl MagiskInit {
fn new(argv: *mut *mut c_char) -> Self {

@@ -4,17 +4,13 @@ use base::{
nix, parse_mount_info, raw_cstr,
};
use cxx::CxxString;
use nix::{
mount::MsFlags,
sys::statfs::{FsType, TMPFS_MAGIC, statfs},
unistd::{chdir, chroot},
};
use nix::mount::MsFlags;
use nix::sys::statfs::{FsType, TMPFS_MAGIC, statfs};
use nix::unistd::{chdir, chroot};
use num_traits::AsPrimitive;
use std::{
collections::BTreeSet,
ops::Bound::{Excluded, Unbounded},
pin::Pin,
};
use std::collections::BTreeSet;
use std::ops::Bound::{Excluded, Unbounded};
use std::pin::Pin;

unsafe extern "C" {
static environ: *const *mut libc::c_char;

@@ -5,13 +5,10 @@ use base::{
BufReadExt, Directory, FsPathBuilder, LoggedResult, ResultExt, Utf8CStr, Utf8CString,
clone_attr, cstr, debug,
};
use std::io::BufReader;
use std::{
fs::File,
io::Write,
mem,
os::fd::{FromRawFd, RawFd},
};
use std::fs::File;
use std::io::{BufReader, Write};
use std::mem;
use std::os::fd::{FromRawFd, RawFd};

pub fn inject_magisk_rc(fd: RawFd, tmp_dir: &Utf8CStr) {
debug!("Injecting magisk rc");

native/src/rustfmt.toml (new file)
@@ -0,0 +1,2 @@
unstable_features = true
imports_granularity = "Module"

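The two-line rustfmt.toml added above is what drives the import reshuffling in the surrounding Rust hunks: imports_granularity = "Module" makes rustfmt emit one use statement per module instead of one deeply nested tree per crate (the option is still unstable, hence unstable_features = true). A minimal sketch of the effect, reusing imports that appear in one of the hunks above:

// Before: crate-granularity nesting
use std::{
    collections::BTreeSet,
    ops::Bound::{Excluded, Unbounded},
    pin::Pin,
};

// After running `cargo fmt` with imports_granularity = "Module"
use std::collections::BTreeSet;
use std::ops::Bound::{Excluded, Unbounded};
use std::pin::Pin;
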
@@ -7,14 +7,15 @@ edition.workspace = true
crate-type = ["staticlib", "rlib"]
path = "lib.rs"

[features]
no-main = []

[lints]
workspace = true

[build-dependencies]
cxx-gen = { workspace = true }

[features]
default = ["main"]
main = []

[dependencies]
base = { path = "../base" }
base = { workspace = true }
cxx = { workspace = true }
argh = { workspace = true }

@@ -1,9 +1,10 @@
use crate::ffi::SePolicy;
use crate::statement::format_statement_help;
use argh::FromArgs;
use base::libc::umask;
use base::{
CmdArgs, EarlyExitExt, FmtAdaptor, LoggedResult, Utf8CString, cmdline_logging, cstr,
libc::umask, log_err,
CmdArgs, EarlyExitExt, FmtAdaptor, LoggedResult, Utf8CString, argh, cmdline_logging, cstr,
log_err,
};
use std::ffi::c_char;
use std::io::stderr;

@@ -8,7 +8,7 @@ use crate::ffi::SePolicy;
#[path = "../include/consts.rs"]
mod consts;

#[cfg(feature = "main")]
#[cfg(not(feature = "no-main"))]
mod cli;
mod rules;
mod statement;

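For context on the feature flip above: the sepolicy crate previously pulled in its CLI through a default "main" feature, and now exposes an opt-out "no-main" feature instead, which the init crate enables via magiskpolicy = { workspace = true, features = ["no-main"] }. A rough, self-contained sketch of that gating pattern (hypothetical item names, not the actual Magisk sources; assumes a "no-main" feature declared in Cargo.toml):

// Compiled only when the consumer has NOT enabled the "no-main" feature,
// i.e. only for the standalone command-line binary build.
#[cfg(not(feature = "no-main"))]
mod cli {
    pub fn run() {
        println!("standalone CLI entry point");
    }
}

// Library API that stays available to every consumer, including crates
// that depend on this one with features = ["no-main"].
pub fn load_rules() -> usize {
    0 // placeholder
}

#[cfg(not(feature = "no-main"))]
pub fn main_entry() {
    cli::run();
}
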
@@ -1,5 +1,6 @@
use crate::SePolicy;
use crate::consts::{SEPOL_FILE_TYPE, SEPOL_LOG_TYPE, SEPOL_PROC_DOMAIN};
use crate::{SePolicy, ffi::Xperm};
use crate::ffi::Xperm;
use base::{LogLevel, set_log_level_state};

macro_rules! rules {

@@ -1,10 +1,12 @@
use std::fmt::{Display, Formatter, Write};
use std::io::{BufRead, BufReader, Cursor};
use std::{iter::Peekable, vec::IntoIter};
use std::iter::Peekable;
use std::vec::IntoIter;

use crate::SePolicy;
use crate::ffi::Xperm;
use base::{BufReadExt, LoggedResult, Utf8CStr, error, nix::fcntl::OFlag, warn};
use base::nix::fcntl::OFlag;
use base::{BufReadExt, LoggedResult, Utf8CStr, error, warn};

pub enum Token<'a> {
AL,

@@ -32,17 +32,17 @@ case $(uname -m) in
esac

cleanup() {
pkill -INT -P $$
wait
trap - EXIT
rm -f magisk_*.img
"$avd" delete avd -n test
exit 1
}

test_error() {
trap - EXIT
print_error "! An error occurred"
pkill -INT -P $$
wait
cleanup
exit 1
}

wait_for_boot() {
@@ -72,13 +72,14 @@ wait_emu() {

dump_vars() {
local val
for name in $@; do
for name in $@ emu_args; do
eval val=\$$name
echo $name=\"$val\"\;
done
}

resolve_vars() {
set +x
local arg_list="$1"
local ver=$2
local type=$3
@@ -138,8 +139,14 @@ dl_emu() {

setup_emu() {
local avd_pkg=$1
local ver=$2
dl_emu $avd_pkg
echo no | "$avd" create avd -f -n test -k $avd_pkg

# avdmanager is outdated, it might not set the proper target
local ini=$ANDROID_AVD_HOME/test.ini
sed "s:^target\s*=.*:target=android-$ver:g" $ini > $ini.new
mv $ini.new $ini
}

test_emu() {
@@ -169,16 +176,15 @@
}

test_main() {
local avd_pkg ramdisk vars
vars=$(resolve_vars "emu_args avd_pkg ramdisk" $1 $2)
eval $vars
local ver avd_pkg ramdisk
eval $(resolve_vars "ver avd_pkg ramdisk" $1 $2)

# Specify an explicit port so that tests can run with other emulators running at the same time
local emu_port=5682
emu_args="$emu_args -port $emu_port"
export ANDROID_SERIAL="emulator-$emu_port"

setup_emu "$avd_pkg"
setup_emu "$avd_pkg" $ver

# Restart ADB daemon just in case
adb kill-server
@@ -211,24 +217,21 @@ test_main() {
test_emu release
fi

# Cleanup
rm -f magisk_*.img
"$avd" delete avd -n test
cleanup
}

run_main() {
local avd_pkg vars
vars=$(resolve_vars "emu_args avd_pkg" $1 $2)
eval $vars
setup_emu "$avd_pkg"
local ver avd_pkg
eval $(resolve_vars "ver avd_pkg" $1 $2)
setup_emu "$avd_pkg" $ver
print_title "* Launching $avd_pkg"
"$emu" @test $emu_args 2>/dev/null
cleanup
}

dl_main() {
local avd_pkg vars
vars=$(resolve_vars "avd_pkg" $1 $2)
eval $vars
local avd_pkg
eval $(resolve_vars "avd_pkg" $1 $2)
print_title "* Downloading $avd_pkg"
dl_emu "$avd_pkg"
}

@@ -36,25 +36,6 @@ disable_version_config() {
sed -i "s:^version=:# version=:g" $CONFIG
}

bump_canary_version() {
# Update version code
local code=$(grep_prop magisk.versionCode $GCONFIG)
code=$((code + 1))
local tag="canary-$code"
sed -i "s:versionCode=.*:versionCode=${code}:g" $GCONFIG

# Commit version code changes
git add -u .
git status
git commit -m "Release new canary build" -m "[skip ci]"
git tag $tag

# Update version name
local ver=$(git rev-parse --short=8 HEAD)
sed -i "s:version=.*:version=${ver}:g" $CONFIG
sed -i "1s:.*:## Magisk (${ver}) (${code}):" $NOTES
}

# $1 = ver
set_version() {
local ver=$1
@@ -69,89 +50,45 @@ set_version() {
git add -u .
git status
git commit -m "Release Magisk v$ver" -m "[skip ci]"
git tag $tag
}

build_apk() {
# $1 = ver
build() {
[ -z $1 ] && exit 1
local ver=$1
git pull
set_version $ver
$BUILDCMD clean
$BUILDCMD all
$BUILDCMD -r all
}

build_canary() {
bump_canary_version
build_apk
}

# $1 = ver
build_public() {
[ -z $1 ] && exit 1
local ver=$1
set_version $ver
build_apk
}

upload() {
# Verify pattern
[[ "$1" =~ canary|beta|stable ]]
local type=$1

gh auth status

local latest_tag=$(git describe --abbrev=0 --tags)
local ver=$(grep_prop version $CONFIG)
local code=$(grep_prop magisk.versionCode $GCONFIG)
local out=$(grep_prop outdir $CONFIG)
local tag title
local ver=$(echo - | awk "{ print $code / 1000 }")
local tag="v$ver"
local title="Magisk v$ver"

local out=$(grep_prop outdir $CONFIG)
if [ -z $out ]; then
out=out
fi

git tag $tag
git push origin master
git push --tags

# Prepare release notes
tail -n +3 $NOTES > release.md

case $type in
canary )
tag="canary-$code"
title="Magisk ($ver) ($code)"
# Publish release
local release_apk="Magisk-v${ver}.apk"
cp $out/app-release.apk $release_apk
gh release create --verify-tag $tag -p -t "$title" -F release.md $release_apk $out/app-debug.apk $NOTES

# Assert tag format
[ $latest_tag = $tag ]

# Publish release
gh release create --verify-tag $tag -p -t "$title" -F release.md $out/app-release.apk $out/app-debug.apk $NOTES
;;
beta|stable )
tag="v$ver"
title="Magisk v$ver"

# Assert tag format
[ $latest_tag = $tag ]

# Publish release
local release_apk="Magisk-v${ver}.apk"
cp $out/app-release.apk $release_apk
gh release create --verify-tag $tag -p -t "$title" -F release.md $release_apk $out/app-debug.apk $NOTES
rm -f $release_apk
;;
esac

# If publishing stable, make it not prerelease and explicitly latest
if [ $type = "stable" ]; then
gh release edit $tag --prerelease=false --latest
fi

rm -f release.md
}

revert() {
local latest_tag=$(git describe --abbrev=0 --tags)
git tag -d $latest_tag
git reset --hard HEAD~
rm -f $release_apk release.md
}

# Use GNU sed on macOS
@@ -160,14 +97,10 @@ if command -v gsed >/dev/null; then
export -f sed
fi

git pull

trap disable_version_config EXIT
ensure_config
case $1 in
canary ) build_canary ;;
public ) build_public $2 ;;
upload ) upload $2 ;;
revert ) revert ;;
build ) build $2 ;;
upload ) upload ;;
* ) exit 1 ;;
esac
