mirror of
https://github.com/Lakr233/vphone-cli.git
synced 2026-04-05 04:59:05 +08:00
Included commits: - f8a54b8 Update JB kernel patch research notes Refresh and revalidate jailbreak kernel-patcher documentation and runtime-verification notes. Key updates: re-analyzed B13 (patch_bsd_init_auth) and retargeted recommended site to the FSIOC_KERNEL_ROOTAUTH return check in bsd_init rather than the old ldr/cbz/bl heuristic; clarified preferred NOP-of-CBNZ vs forcing ioctl return. Reworked C21 (patch_cred_label_update_execve) to preserve AMFI exec-time flow and instead clear restrictive csflags in a success-tail trampoline; disabled in default schedule until boot validation. Documented that C23 (patch_hook_cred_label_update_execve) was mis-targeting the wrapper (sub_FFFFFE00093D2CE4) instead of the real hook body (_hook_cred_label_update_execve), explaining boot failures and recommending retargeting. Noted syscallmask and vm_fault matcher problems (patch_syscallmask_apply_to_proc historical hit targeted _profile_syscallmask_destroy; patch_vm_fault_enter_prepare matcher resolves to pmap_lock_phys_page path), and updated the runtime-verification summary with follow-up findings and which methods are temporarily commented out/disabled in the default KernelJBPatcher schedule pending staged re-validation. - 6ebac65 fix: patch_bsd_init_auth - 5b224d3 fix: patch_io_secure_bsd_root - e6806bf docs: update patch notes - 0d89c5c Retarget vm_fault_enter_prepare jailbreak patch - 6b9d79b Rework C21 late-exit cred_label patch - ece8cc0 Clean C21 mov matcher encodings - ad2ea7c enabled fixed patch_cred_label_update_execve - c37b6b1 Rebuild syscallmask C22 patch - 363dd7a Rebuild JB C23 as faithful upstream trampoline - 129e648 Disable IOUC MACF; rebuild kcall10 & C22 docs Re-evaluate and rework several JB kernel patches and docs: mark patch_iouc_failed_macf as reverted/disabled (repo-local, over-broad early-return) and replace its patcher with a no-op implementation to emit zero writes by default; update research notes to explain the reanalysis and rationale. 
Rebuild patch_kcall10: replace the historical 10-arg design with an ABI-correct syscall-439 cave (target + 7 args -> uint64 return), add a new cave builder and munge32 reuse logic in the kcall10 patcher, and enable the method in KernelJBPatcher group. Clarify syscallmask (C22) semantics in docs: upstream C22 is an all-ones-mask retarget (not a NULL install) and keep the rebuilt all-ones wrapper as the authoritative baseline. Misc: minor refactors and helper additions (chained-pointer helpers, cave size/constants, validation and dry-run safeguards) to improve correctness and alignment with IDA/runtime verification. - e1b2365 Rebuild kcall10 as ABI-correct syscall cave - 23090d0 fix patch_iouc_failed_macf - 0056be2 Normalize formatting in research docs Apply whitespace and formatting cleanup across research markdown files for consistency and readability. Adjust table alignment and spacing in 00_patch_comparison_all_variants.md, normalize list/indentation spacing in patch_bsd_init_auth.md and patch_syscallmask_apply_to_proc.md, and add/clean blank lines and minor spacing in patch_kcall10.md. These are non-functional documentation changes only.
202 lines
7.2 KiB
Python
202 lines
7.2 KiB
Python
"""Mixin: KernelJBPatchSecureRootMixin."""
|
|
|
|
from .kernel_jb_base import ARM64_OP_IMM, asm
|
|
|
|
|
|
class KernelJBPatchSecureRootMixin:
    """Locate and neutralise the _IOSecureBSDRoot "SecureRootName" deny path.

    The patch finds the kernel function(s) referencing the "SecureRootName"
    C string, identifies the final ``csel wN, wzr, wM, ne`` that selects
    between success (0) and the deny error code, and rewrites that single
    instruction to ``mov wN, #0`` so the secure-root policy always reports
    success.
    """

    # Byte offset (relative to x19) of the flag byte recording whether the
    # "SecureRootName" property matched; used by the ldrb/strb context
    # scans below to disambiguate the CSEL site.
    # NOTE(review): offset is build-specific — confirm against the target
    # kernel before reuse.
    _SECURE_ROOT_MATCH_OFFSET = 0x11A

    def patch_io_secure_bsd_root(self):
        """Force the SecureRootName policy return to success.

        Historical versions of this patch matched the first BL* + CBZ/CBNZ W0
        inside the AppleARMPE secure-root dispatch function and rewrote the
        "SecureRoot" gate. That site is semantically wrong and can perturb the
        broader platform-function dispatch path.

        The correct minimal bypass is the final CSEL in the "SecureRootName"
        path that selects between success (0) and kIOReturnNotPrivileged.

        Returns:
            bool: True when exactly one patch was emitted, False otherwise.
        """
        self._log("\n[JB] _IOSecureBSDRoot: force SecureRootName success")

        func_candidates = self._find_secure_root_functions()
        if not func_candidates:
            self._log("    [-] secure-root dispatch function not found")
            return False

        # Deterministic order: patch the lowest-addressed candidate that
        # contains a matching deny-return site, then stop.
        for func_start in sorted(func_candidates):
            func_end = self._find_func_end(func_start, 0x1200)
            site = self._find_secure_root_return_site(func_start, func_end)
            if not site:
                continue

            off, reg_name = site
            # Validate the encoding round-trips before emitting a write.
            patch_bytes = self._compile_zero_return_checked(reg_name)
            self.emit(
                off,
                patch_bytes,
                f"mov {reg_name}, #0 [_IOSecureBSDRoot SecureRootName allow]",
            )
            return True

        self._log("    [-] SecureRootName deny-return site not found")
        return False

    def _find_secure_root_functions(self):
        """Return candidate function-start offsets for the secure-root dispatch.

        Prefers functions that reference BOTH "SecureRootName" and
        "SecureRoot"; falls back to the "SecureRootName"-only referers when
        the intersection is empty.  Returns an empty set when nothing
        references "SecureRootName" at all.
        """
        funcs_with_name = self._functions_referencing_string(b"SecureRootName")
        if not funcs_with_name:
            return set()

        funcs_with_root = self._functions_referencing_string(b"SecureRoot")
        common = funcs_with_name & funcs_with_root
        return common if common else funcs_with_name

    def _functions_referencing_string(self, needle):
        """Return the set of function starts whose code references *needle*.

        Resolves every exact C-string occurrence of *needle* in the image,
        collects code references inside the kernel __TEXT range, and walks
        each referencing instruction back to its containing function start.
        """
        func_starts = set()
        for str_off in self._all_cstring_offsets(needle):
            refs = self.find_string_refs(str_off, *self.kern_text)
            for adrp_off, _, _ in refs:
                fn = self.find_function_start(adrp_off)
                # find_function_start() yields a negative value when the
                # reference cannot be attributed to a function prologue.
                if fn >= 0:
                    func_starts.add(fn)
        return func_starts

    def _all_cstring_offsets(self, needle):
        """Return sorted start offsets of NUL-terminated strings equal to *needle*.

        Substring hits are rejected: each raw match is widened to its
        enclosing C string (previous NUL .. next NUL) and kept only when the
        whole string equals *needle* exactly.

        Args:
            needle: bytes or str (str is encoded as UTF-8).

        Returns:
            Sorted list of unique byte offsets into ``self.raw``.
        """
        if isinstance(needle, str):
            needle = needle.encode()

        out = []
        start = 0
        while True:
            pos = self.raw.find(needle, start)
            if pos < 0:
                break
            # Rewind to the start of the enclosing C string.
            cstr = pos
            while cstr > 0 and self.raw[cstr - 1] != 0:
                cstr -= 1
            cend = self.raw.find(b"\x00", cstr)
            if cend > cstr and self.raw[cstr:cend] == needle:
                out.append(cstr)
            start = pos + 1
        return sorted(set(out))

    def _find_secure_root_return_site(self, func_start, func_end):
        """Find the deny-return CSEL inside [func_start, func_end).

        Looks for ``csel wN, wzr/xzr, wM, ne`` whose lookback window matches
        both the flag-test context (ldrb/tst of the match flag plus the
        error-code build) and the compare context (cmp w0 / cset eq / strb).

        Returns:
            (offset, dest_reg_name) tuple, or None when no site matches.
        """
        for off in range(func_start, func_end - 4, 4):
            dis = self._disas_at(off)
            if not dis:
                continue
            ins = dis[0]
            if ins.mnemonic != "csel" or len(ins.operands) != 3:
                continue
            # The condition code is the trailing comma-separated token.
            if ins.op_str.replace(" ", "").split(",")[-1] != "ne":
                continue

            dest = ins.reg_name(ins.operands[0].reg)
            zero_src = ins.reg_name(ins.operands[1].reg)
            err_src = ins.reg_name(ins.operands[2].reg)
            if zero_src not in ("wzr", "xzr"):
                continue
            # IOReturn is 32-bit: the destination must be a W register.
            if not dest.startswith("w"):
                continue
            if not self._has_secure_rootname_return_context(off, func_start, err_src):
                continue
            if not self._has_secure_rootname_compare_context(off, func_start):
                continue

            return off, dest
        return None

    def _has_secure_rootname_return_context(self, off, func_start, err_reg_name):
        """Check the 0x40-byte lookback window before *off* for the deny pattern.

        Scanning backwards (never past *func_start*), all three of these must
        appear for the candidate CSEL to be accepted:
          * ``tst ..., #1``                              — match-flag test;
          * ``ldrb ..., [x19, #_SECURE_ROOT_MATCH_OFFSET]`` — flag load,
            only counted once the tst has been seen;
          * a mov/movk/sub writing *err_reg_name*        — error-code build.
        """
        saw_flag_load = False
        saw_flag_test = False
        saw_err_build = False
        lookback_start = max(func_start, off - 0x40)

        for probe in range(off - 4, lookback_start - 4, -4):
            dis = self._disas_at(probe)
            if not dis:
                continue
            ins = dis[0]
            ops = ins.op_str.replace(" ", "")

            if not saw_flag_test and ins.mnemonic == "tst" and ops.endswith("#1"):
                saw_flag_test = True
                continue

            if (
                saw_flag_test
                and not saw_flag_load
                and ins.mnemonic == "ldrb"
                and f"[x19,#0x{self._SECURE_ROOT_MATCH_OFFSET:x}]" in ops
            ):
                saw_flag_load = True
                continue

            if self._writes_register(ins, err_reg_name) and ins.mnemonic in ("mov", "movk", "sub"):
                saw_err_build = True

        return saw_flag_load and saw_flag_test and saw_err_build

    def _has_secure_rootname_compare_context(self, off, func_start):
        """Check the 0xA0-byte lookback window before *off* for the compare pattern.

        Requires, in backwards order of discovery: the strb that stores the
        match flag, then a ``cset ..., eq``, then a ``cmp w0, #0`` — the
        strcmp-style "SecureRootName" comparison tail.
        """
        saw_match_store = False
        saw_cset_eq = False
        saw_cmp_w0_zero = False
        lookback_start = max(func_start, off - 0xA0)

        for probe in range(off - 4, lookback_start - 4, -4):
            dis = self._disas_at(probe)
            if not dis:
                continue
            ins = dis[0]
            ops = ins.op_str.replace(" ", "")

            if (
                not saw_match_store
                and ins.mnemonic == "strb"
                and f"[x19,#0x{self._SECURE_ROOT_MATCH_OFFSET:x}]" in ops
            ):
                saw_match_store = True
                continue

            if saw_match_store and not saw_cset_eq and ins.mnemonic == "cset" and ops.endswith(",eq"):
                saw_cset_eq = True
                continue

            if saw_match_store and saw_cset_eq and not saw_cmp_w0_zero and ins.mnemonic == "cmp":
                if ops.startswith("w0,#0"):
                    saw_cmp_w0_zero = True
                    break

        return saw_match_store and saw_cset_eq and saw_cmp_w0_zero

    def _writes_register(self, ins, reg_name):
        """Return True when *ins*'s first (destination) operand is *reg_name*.

        Operand type ``1`` is capstone's ARM64_OP_REG; kept as a literal
        because this module only imports ARM64_OP_IMM.
        """
        if not ins.operands:
            return False
        first = ins.operands[0]
        if getattr(first, "type", None) != 1:
            return False
        return ins.reg_name(first.reg) == reg_name

    def _compile_zero_return_checked(self, reg_name):
        """Assemble ``mov <reg_name>, #0`` and verify the encoding round-trips.

        The assembled bytes are disassembled again and must decode as a
        ``mov`` to *reg_name* with immediate 0.  Validation uses explicit
        raises rather than ``assert`` statements so the checks survive
        ``python -O`` (bare asserts are stripped under optimization, which
        would let an unverified kernel patch through silently).

        Raises:
            AssertionError: if the encoding does not decode as expected.
        """
        patch_bytes = asm(f"mov {reg_name}, #0")
        insns = self._disas_n(patch_bytes, 0, 1)
        if not insns:
            raise AssertionError("capstone decode failed for secure-root zero-return patch")
        ins = insns[0]
        if ins.mnemonic != "mov":
            raise AssertionError(
                f"secure-root zero-return decode mismatch: expected 'mov', got '{ins.mnemonic}'"
            )
        got_dst = ins.reg_name(ins.operands[0].reg)
        if got_dst != reg_name:
            raise AssertionError(
                f"secure-root zero-return destination mismatch: expected '{reg_name}', got '{got_dst}'"
            )
        got_imm = None
        for op in ins.operands[1:]:
            if op.type == ARM64_OP_IMM:
                got_imm = op.imm
                break
        if got_imm != 0:
            raise AssertionError(
                f"secure-root zero-return immediate mismatch: expected 0, got {got_imm}"
            )
        return patch_bytes
|