initial code for dumping imessages in a reasonable format

This commit is contained in:
Jeffrey Paul
2014-02-09 00:30:49 +01:00
parent c0021efb13
commit 9dd7628f04
157 changed files with 24178 additions and 0 deletions

View File

@@ -0,0 +1,3 @@
Dependencies
pycrypto (https://www.dlitz.net/software/pycrypto/)
construct (http://construct.wikispaces.com/)

View File

@@ -0,0 +1,78 @@
from backups.backup3 import decrypt_backup3
from backups.backup4 import MBDB
from keystore.keybag import Keybag
from util import readPlist, makedirs
import os
import sys
import plistlib
# Info.plist keys displayed to the user before extraction starts.
showinfo = ["Device Name", "Display Name", "Last Backup Date", "IMEI",
            "Serial Number", "Product Type", "Product Version", "iTunes Version"]
def extract_backup(backup_path, output_path, password=""):
    """Interactively extract one iTunes backup directory to output_path.

    backup_path must contain a Manifest.plist; if the backup is encrypted
    and no password is given, the user is prompted for one.  iOS 3.x
    backups (no BackupKeyBag) are handled by decrypt_backup3(); iOS 4+
    backups go through MBDB/Keybag.
    """
    if not os.path.exists(backup_path + "/Manifest.plist"):
        print "Manifest.plist not found"
        return
    manifest = readPlist(backup_path + "/Manifest.plist")
    info = readPlist(backup_path + "/Info.plist")
    # show basic device information before asking for confirmation
    for i in showinfo:
        print i + " : " + unicode(info.get(i, "missing"))
    print "Extract backup to %s ? (y/n)" % output_path
    if raw_input() == "n":
        return
    print "Backup is %sencrypted" % (int(not manifest["IsEncrypted"]) * "not ")
    if manifest["IsEncrypted"] and password == "":
        print "Enter backup password : "
        password = raw_input()
    if not manifest.has_key("BackupKeyBag"):
        # pre-iOS 4 backups have no keybag; use the legacy decryption path
        print "No BackupKeyBag in manifest, assuming iOS 3.x backup"
        decrypt_backup3(backup_path, output_path, password)
    else:
        mbdb = MBDB(backup_path)
        kb = Keybag.createWithBackupManifest(manifest, password)
        if not kb:
            # wrong password / keybag unlock failure
            return
        # store the password so keychain_tool.py can reuse it later
        manifest["password"] = password
        makedirs(output_path)
        plistlib.writePlist(manifest, output_path + "/Manifest.plist")
        mbdb.keybag = kb
        mbdb.extract_backup(output_path)
        print "You can decrypt the keychain using the following command : "
        print "python keychain_tool.py -d \"%s\" \"%s\"" % (output_path + "/KeychainDomain/keychain-backup.plist", output_path + "/Manifest.plist")
def extract_all():
    """Extract every iTunes backup found in the platform's default
    MobileSync/Backup folder (Windows and OS X only)."""
    if sys.platform == "win32":
        mobilesync = os.environ["APPDATA"] + "/Apple Computer/MobileSync/Backup/"
    elif sys.platform == "darwin":
        mobilesync = os.environ["HOME"] + "/Library/Application Support/MobileSync/Backup/"
    else:
        print "Unsupported operating system"
        return
    print "-" * 60
    print "Searching for iTunes backups"
    print "-" * 60
    # each subdirectory of MobileSync/Backup is named after a device UDID
    for udid in os.listdir(mobilesync):
        extract_backup(mobilesync + "/" + udid, udid + "_extract")
def main():
    """Command-line entry point: <backup path> [output path]."""
    if len(sys.argv) < 2:
        print "Usage: %s <backup path> [output path]" % sys.argv[0]
        return
    backup_path = sys.argv[1]
    # default output directory: backup's parent path suffixed with _extract
    output_path = os.path.dirname(backup_path) + "_extract"
    if len(sys.argv) >= 3:
        output_path = sys.argv[2]
    extract_backup(backup_path, output_path)

if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,67 @@
from crypto.PBKDF2 import PBKDF2
from crypto.aes import AESdecryptCBC
from util import read_file, write_file, makedirs, readPlist
from util.bplist import BPlistReader
import hashlib
import struct
import glob
import sys
import os
"""
decrypt iOS 3 backup blob (metadata and file contents)
"""
def decrypt_blob(blob, auth_key):
len = struct.unpack(">H", blob[0:2])[0]
if len != 66:
print "blob len != 66"
magic = struct.unpack(">H", blob[2:4])[0]
if magic != 0x0100:
print "magic != 0x0100"
iv = blob[4:20]
blob_key = AESdecryptCBC(blob[20:68], auth_key, iv)[:32]
return AESdecryptCBC(blob[68:], blob_key, iv, padding=True)
def decrypt_backup3(backupfolder, outputfolder, passphrase):
    """Decrypt an iOS 3.x backup: verify the passphrase, then decrypt each
    .mdinfo/.mddata pair and write the file under its original path."""
    auth_key = None
    manifest = readPlist(backupfolder + "/Manifest.plist")
    if manifest["IsEncrypted"]:
        # AuthData layout: 8-byte PBKDF2 salt, 16-byte IV, then the
        # encrypted auth-key material.
        manifest_data = manifest["Data"].data  # NOTE(review): read but never used
        authdata = manifest["AuthData"].data
        pkbdf_salt = authdata[:8]
        iv = authdata[8:24]
        key = PBKDF2(passphrase, pkbdf_salt, iterations=2000).read(32)
        data = AESdecryptCBC(authdata[24:], key, iv)
        auth_key = data[:32]
        # the SHA1 of the auth key is stored right after it as a check value
        if hashlib.sha1(auth_key).digest() != data[32:52]:
            print "wrong auth key (hash mismatch) => wrong passphrase"
            return
        print "Passphrase seems OK"
    for mdinfo_name in glob.glob(backupfolder + "/*.mdinfo"):
        # each .mdinfo (metadata) file pairs with a .mddata (contents) file
        mddata_name = mdinfo_name[:-7] + ".mddata"
        mdinfo = readPlist(mdinfo_name)
        metadata = mdinfo["Metadata"].data
        if mdinfo["IsEncrypted"]:
            metadata = decrypt_blob(metadata, auth_key)
        metadata = BPlistReader.plistWithString(metadata)
        print metadata["Path"]
        filedata = read_file(mddata_name)
        if mdinfo["IsEncrypted"]:
            filedata = decrypt_blob(filedata, auth_key)
        filename = metadata["Path"]
        makedirs(outputfolder + "/" + os.path.dirname(filename))
        write_file(outputfolder + "/" + filename, filedata)

View File

@@ -0,0 +1,174 @@
from Crypto.Cipher import AES
from hashlib import sha1
from struct import unpack
import os
# Magic bytes at the start of a Manifest.mbdb file (format version 5.0).
MBDB_SIGNATURE = 'mbdb\x05\x00'
# Unix file-type bits from the record's mode field.
MASK_SYMBOLIC_LINK = 0xa000
MASK_REGULAR_FILE = 0x8000
MASK_DIRECTORY = 0x4000
def warn(msg):
    # Lightweight warning helper; parsing continues after a warning.
    print "WARNING: %s" % msg
class MBFileRecord(object):
def __init__(self, mbdb):
self.domain = self._decode_string(mbdb)
if self.domain is None:
warn("Domain name missing from record")
self.path = self._decode_string(mbdb)
if self.path is None:
warn("Relative path missing from record")
self.target= self._decode_string(mbdb) # for symbolic links
self.digest = self._decode_string(mbdb)
self.encryption_key = self._decode_data(mbdb)
data = mbdb.read(40) # metadata, fixed size
self.mode, = unpack('>H', data[0:2])
if not(self.is_regular_file() or self.is_symbolic_link() or self.is_directory()):
print self.mode
warn("File type mising from record mode")
if self.is_symbolic_link() and self.target is None:
warn("Target required for symblolic links")
self.inode_number = unpack('>Q', data[2:10])
self.user_id, = unpack('>I', data[10:14])
self.group_id = unpack('>I', data[14:18])
self.last_modification_time, = unpack('>i', data[18:22])
self.last_status_change_time, = unpack('>i', data[22:26])
self.birth_time, = unpack('>i', data[26:30])
self.size, = unpack('>q', data[30:38])
if self.size != 0 and not self.is_regular_file():
warn("Non-zero size for a record which is not a regular file")
self.protection_class = ord(data[38])
num_attributes = ord(data[39])
if num_attributes == 0:
self.extended_attributes = None
else:
self.extended_attributes = {}
for i in xrange(num_attributes):
k = self._decode_string(mbdb)
v = self._decode_data(mbdb)
self.extended_attributes[k] = v
def _decode_string(self, s):
s_len, = unpack('>H', s.read(2))
if s_len == 0xffff:
return None
return s.read(s_len)
def _decode_data(self, s):
return self._decode_string(s)
def type(self):
return self.mode & 0xf000
def is_symbolic_link(self):
return self.type() == MASK_SYMBOLIC_LINK
def is_regular_file(self):
return self.type() == MASK_REGULAR_FILE
def is_directory(self):
return self.type() == MASK_DIRECTORY
class MBDB(object):
    """Parser/extractor for an iOS 4+ backup's Manifest.mbdb database.

    self.files maps the hashed on-disk file name (SHA1 hex of
    "domain-path") to its MBFileRecord.  self.keybag must be set by the
    caller before extract_backup() for encrypted backups.
    """
    def __init__(self, path):
        self.files = {}
        self.backup_path = path
        self.keybag = None  # assigned externally before extraction
        # open the database
        mbdb = file(path + '/Manifest.mbdb', 'rb')
        # skip signature
        signature = mbdb.read(len(MBDB_SIGNATURE))
        if signature != MBDB_SIGNATURE:
            raise Exception("Bad mbdb signature")
        try:
            while True:
                rec = MBFileRecord(mbdb)
                # backup files are stored on disk under sha1("domain-path")
                fn = rec.domain + "-" + rec.path
                sb = sha1(fn).digest().encode('hex')
                # NOTE(review): a SHA1 hex digest is always 40 chars, so
                # this padding branch looks unreachable
                if len(sb) % 2 == 1:
                    sb = '0'+sb
                self.files[sb] = rec
        except:
            # NOTE(review): the bare except doubles as the end-of-stream
            # condition, but it also hides genuine parse errors
            mbdb.close()

    def get_file_by_name(self, filename):
        """Return (hashed_name, record) for the first record whose relative
        path equals filename, or None if not found."""
        for (k, v) in self.files.iteritems():
            if v.path == filename:
                return (k, v)
        return None

    def extract_backup(self, output_path):
        """Write every file of the backup under output_path/<domain>/<path>."""
        for record in self.files.values():
            # create directories if they do not exist
            # makedirs throw an exception, my code is ugly =)
            if record.is_directory():
                try:
                    os.makedirs(os.path.join(output_path, record.domain, record.path))
                except:
                    pass
        for (filename, record) in self.files.items():
            # skip directories
            if record.is_directory():
                continue
            self.extract_file(filename, record, output_path)

    def extract_file(self, filename, record, output_path):
        """Copy (and decrypt, if needed) one backup file to output_path."""
        # adjust output file name
        if record.is_symbolic_link():
            out_file = record.target
        else:
            out_file = record.path
        # read backup file
        try:
            f1 = file(os.path.join(self.backup_path, filename), 'rb')
        except(IOError):
            warn("File %s (%s) has not been found" % (filename, record.path))
            return
        # write output file
        output_path = os.path.join(output_path, record.domain, out_file)
        print("Writing %s" % output_path)
        f2 = file(output_path, 'wb')
        aes = None
        if record.encryption_key is not None and self.keybag: # file is encrypted!
            # the first 4 bytes of the stored key blob are a header and
            # are skipped before unwrapping
            key = self.keybag.unwrapKeyForClass(record.protection_class, record.encryption_key[4:])
            if not key:
                warn("Cannot unwrap key")
                # NOTE(review): f1/f2 remain open on this early return
                return
            aes = AES.new(key, AES.MODE_CBC, "\x00"*16)
        # stream-decrypt in 8 KiB chunks; CBC state carries across chunks
        while True:
            data = f1.read(8192)
            if not data:
                break
            if aes:
                data2 = data = aes.decrypt(data)
            f2.write(data)
        f1.close()
        if aes:
            # strip CBC padding using the last decrypted chunk
            c = data2[-1]
            i = ord(c)
            if i < 17 and data2.endswith(c*i):
                f2.truncate(f2.tell() - i)
            else:
                warn("Bad padding, last byte = 0x%x !" % i)
        f2.close()

View File

@@ -0,0 +1,354 @@
#!/usr/bin/python
# -*- coding: ascii -*-
###########################################################################
# PBKDF2.py - PKCS#5 v2.0 Password-Based Key Derivation
#
# Copyright (C) 2007, 2008 Dwayne C. Litzenberger <dlitz@dlitz.net>
# All rights reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR PROVIDES THIS SOFTWARE ``AS IS'' AND ANY EXPRESSED OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Country of origin: Canada
#
###########################################################################
# Sample PBKDF2 usage:
# from Crypto.Cipher import AES
# from PBKDF2 import PBKDF2
# import os
#
# salt = os.urandom(8) # 64-bit salt
# key = PBKDF2("This passphrase is a secret.", salt).read(32) # 256-bit key
# iv = os.urandom(16) # 128-bit IV
# cipher = AES.new(key, AES.MODE_CBC, iv)
# ...
#
# Sample crypt() usage:
# from PBKDF2 import crypt
# pwhash = crypt("secret")
# alleged_pw = raw_input("Enter password: ")
# if pwhash == crypt(alleged_pw, pwhash):
# print "Password good"
# else:
# print "Invalid password"
#
###########################################################################
# History:
#
# 2007-07-27 Dwayne C. Litzenberger <dlitz@dlitz.net>
# - Initial Release (v1.0)
#
# 2007-07-31 Dwayne C. Litzenberger <dlitz@dlitz.net>
# - Bugfix release (v1.1)
# - SECURITY: The PyCrypto XOR cipher (used, if available, in the _strxor
# function in the previous release) silently truncates all keys to 64
# bytes. The way it was used in the previous release, this would only be
# problem if the pseudorandom function that returned values larger than
# 64 bytes (so SHA1, SHA256 and SHA512 are fine), but I don't like
# anything that silently reduces the security margin from what is
# expected.
#
# 2008-06-17 Dwayne C. Litzenberger <dlitz@dlitz.net>
# - Compatibility release (v1.2)
# - Add support for older versions of Python (2.2 and 2.3).
#
###########################################################################
__version__ = "1.2"
from struct import pack
from binascii import b2a_hex
from random import randint
import string
try:
# Use PyCrypto (if available)
from Crypto.Hash import HMAC, SHA as SHA1
except ImportError:
# PyCrypto not available. Use the Python standard library.
import hmac as HMAC
import sha as SHA1
def strxor(a, b):
    """XOR two strings byte-by-byte.

    The result is as long as the shorter of the two inputs.
    """
    out = []
    for ch_a, ch_b in zip(a, b):
        out.append(chr(ord(ch_a) ^ ord(ch_b)))
    return "".join(out)
def b64encode(data, chars="+/"):
    """Base64-encode data, substituting `chars` for the standard '+/'
    alphabet characters and stripping newlines."""
    tt = string.maketrans("+/", chars)
    return data.encode('base64').replace("\n", "").translate(tt)
class PBKDF2(object):
    """PBKDF2.py : PKCS#5 v2.0 Password-Based Key Derivation

    This implementation takes a passphrase and a salt (and optionally an
    iteration count, a digest module, and a MAC module) and provides a
    file-like object from which an arbitrarily-sized key can be read.

    If the passphrase and/or salt are unicode objects, they are encoded as
    UTF-8 before they are processed.

    The idea behind PBKDF2 is to derive a cryptographic key from a
    passphrase and a salt.

    PBKDF2 may also be used as a strong salted password hash.  The
    'crypt' function is provided for that purpose.

    Remember: Keys generated using PBKDF2 are only as strong as the
    passphrases they are derived from.
    """
    def __init__(self, passphrase, salt, iterations=1000,
                 digestmodule=SHA1, macmodule=HMAC):
        self.__macmodule = macmodule
        self.__digestmodule = digestmodule
        self._setup(passphrase, salt, iterations, self._pseudorandom)

    def _pseudorandom(self, key, msg):
        """Pseudorandom function.  e.g. HMAC-SHA1"""
        return self.__macmodule.new(key=key, msg=msg,
                                    digestmod=self.__digestmodule).digest()

    def read(self, bytes):
        """Read the specified number of key bytes.

        Successive calls continue the key stream, so reading in chunks
        yields the same bytes as one large read.
        """
        if self.closed:
            raise ValueError("file-like object is closed")
        size = len(self.__buf)
        blocks = [self.__buf]
        i = self.__blockNum
        # generate whole blocks until we have enough buffered material
        while size < bytes:
            i += 1
            if i > 0xffffffffL or i < 1:
                # RFC 2898 limits the derived key to (2^32 - 1) blocks;
                # treat exceeding it as an error rather than returning "".
                raise OverflowError("derived key too long")
            block = self.__f(i)
            blocks.append(block)
            size += len(block)
        buf = "".join(blocks)
        retval = buf[:bytes]
        # keep the surplus for the next read() call
        self.__buf = buf[bytes:]
        self.__blockNum = i
        return retval

    def __f(self, i):
        """Compute PBKDF2 block T_i: iterated, XOR-folded PRF outputs."""
        # i must fit within 32 bits
        assert 1 <= i <= 0xffffffffL
        U = self.__prf(self.__passphrase, self.__salt + pack("!L", i))
        result = U
        for j in xrange(2, 1+self.__iterations):
            U = self.__prf(self.__passphrase, U)
            result = strxor(result, U)
        return result

    def hexread(self, octets):
        """Read the specified number of octets. Return them as hexadecimal.

        Note that len(obj.hexread(n)) == 2*n.
        """
        return b2a_hex(self.read(octets))

    def _setup(self, passphrase, salt, iterations, prf):
        """Validate arguments and initialise the key-stream state."""
        # Sanity checks:
        # passphrase and salt must be str or unicode (in the latter
        # case, we convert to UTF-8)
        if isinstance(passphrase, unicode):
            passphrase = passphrase.encode("UTF-8")
        if not isinstance(passphrase, str):
            raise TypeError("passphrase must be str or unicode")
        if isinstance(salt, unicode):
            salt = salt.encode("UTF-8")
        if not isinstance(salt, str):
            raise TypeError("salt must be str or unicode")
        # iterations must be an integer >= 1
        if not isinstance(iterations, (int, long)):
            raise TypeError("iterations must be an integer")
        if iterations < 1:
            raise ValueError("iterations must be at least 1")
        # prf must be callable
        if not callable(prf):
            raise TypeError("prf must be callable")
        self.__passphrase = passphrase
        self.__salt = salt
        self.__iterations = iterations
        self.__prf = prf
        self.__blockNum = 0
        self.__buf = ""
        self.closed = False

    def close(self):
        """Close the stream; subsequent read() calls raise ValueError."""
        if not self.closed:
            del self.__passphrase
            del self.__salt
            del self.__iterations
            del self.__prf
            del self.__blockNum
            del self.__buf
            self.closed = True
def crypt(word, salt=None, iterations=None):
    """PBKDF2-based unix crypt(3) replacement.

    The number of iterations specified in the salt overrides the 'iterations'
    parameter.

    The effective hash length is 192 bits.  Output format:
    "$p5k2$<iterations-hex-or-empty>$<salt>$<base64 hash>".
    """
    # Generate a (pseudo-)random salt if the user hasn't provided one.
    if salt is None:
        salt = _makesalt()
    # salt must be a string or the us-ascii subset of unicode
    if isinstance(salt, unicode):
        salt = salt.encode("us-ascii")
    if not isinstance(salt, str):
        raise TypeError("salt must be a string")
    # word must be a string or unicode (in the latter case, we convert to UTF-8)
    if isinstance(word, unicode):
        word = word.encode("UTF-8")
    if not isinstance(word, str):
        raise TypeError("word must be a string or unicode")
    # Try to extract the real salt and iteration count from the salt
    if salt.startswith("$p5k2$"):
        (iterations, salt, dummy) = salt.split("$")[2:5]
        if iterations == "":
            # empty iteration field means the default count of 400
            iterations = 400
        else:
            converted = int(iterations, 16)
            if iterations != "%x" % converted:  # lowercase hex, minimum digits
                raise ValueError("Invalid salt")
            iterations = converted
            if not (iterations >= 1):
                raise ValueError("Invalid salt")
    # Make sure the salt matches the allowed character set
    allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./"
    for ch in salt:
        if ch not in allowed:
            raise ValueError("Illegal character %r in salt" % (ch,))
    # the default iteration count (400) is encoded as an empty field
    if iterations is None or iterations == 400:
        iterations = 400
        salt = "$p5k2$$" + salt
    else:
        salt = "$p5k2$%x$%s" % (iterations, salt)
    rawhash = PBKDF2(word, salt, iterations).read(24)
    return salt + "$" + b64encode(rawhash, "./")

# Add crypt as a static method of the PBKDF2 class
# This makes it easier to do "from PBKDF2 import PBKDF2" and still use
# crypt.
PBKDF2.crypt = staticmethod(crypt)
def _makesalt():
    """Return a 48-bit pseudorandom salt for crypt().

    This function is not suitable for generating cryptographic secrets
    (it uses the non-cryptographic `random` module).
    """
    binarysalt = "".join([pack("@H", randint(0, 0xffff)) for i in range(3)])
    return b64encode(binarysalt, "./")
def test_pbkdf2():
    """Module self-test: RFC 3962 vectors, chunked-read equivalence, and
    crypt() round-trips.  Raises RuntimeError on any mismatch."""
    from binascii import a2b_hex
    #
    # Test vectors from RFC 3962
    #
    # Test 1
    result = PBKDF2("password", "ATHENA.MIT.EDUraeburn", 1).read(16)
    expected = a2b_hex("cdedb5281bb2f801565a1122b2563515")
    if result != expected:
        raise RuntimeError("self-test failed")
    # Test 2
    result = PBKDF2("password", "ATHENA.MIT.EDUraeburn", 1200).hexread(32)
    expected = ("5c08eb61fdf71e4e4ec3cf6ba1f5512b"
                "a7e52ddbc5e5142f708a31e2e62b1e13")
    if result != expected:
        raise RuntimeError("self-test failed")
    # Test 3
    result = PBKDF2("X"*64, "pass phrase equals block size", 1200).hexread(32)
    expected = ("139c30c0966bc32ba55fdbf212530ac9"
                "c5ec59f1a452f5cc9ad940fea0598ed1")
    if result != expected:
        raise RuntimeError("self-test failed")
    # Test 4
    result = PBKDF2("X"*65, "pass phrase exceeds block size", 1200).hexread(32)
    expected = ("9ccad6d468770cd51b10e6a68721be61"
                "1a8b4d282601db3b36be9246915ec82a")
    if result != expected:
        raise RuntimeError("self-test failed")
    #
    # Other test vectors
    #
    # Chunked read: reading in pieces must equal one contiguous read
    f = PBKDF2("kickstart", "workbench", 256)
    result = f.read(17)
    result += f.read(17)
    result += f.read(1)
    result += f.read(2)
    result += f.read(3)
    expected = PBKDF2("kickstart", "workbench", 256).read(40)
    if result != expected:
        raise RuntimeError("self-test failed")
    #
    # crypt() test vectors
    #
    # crypt 1
    result = crypt("cloadm", "exec")
    expected = '$p5k2$$exec$r1EWMCMk7Rlv3L/RNcFXviDefYa0hlql'
    if result != expected:
        raise RuntimeError("self-test failed")
    # crypt 2
    result = crypt("gnu", '$p5k2$c$u9HvcT4d$.....')
    expected = '$p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g'
    if result != expected:
        raise RuntimeError("self-test failed")
    # crypt 3
    result = crypt("dcl", "tUsch7fU", iterations=13)
    expected = "$p5k2$d$tUsch7fU$nqDkaxMDOFBeJsTSfABsyn.PYUXilHwL"
    if result != expected:
        raise RuntimeError("self-test failed")
    # crypt 4 (unicode)
    result = crypt(u'\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2',
                   '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ')
    expected = '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ'
    if result != expected:
        raise RuntimeError("self-test failed")

if __name__ == '__main__':
    test_pbkdf2()

# vim:set ts=4 sw=4 sts=4 expandtab:

View File

@@ -0,0 +1,26 @@
from Crypto.Cipher import AES
ZEROIV = "\x00"*16

def removePadding(blocksize, s):
    """Strip RFC 1423 padding from a string.

    The last byte gives the number of padding bytes appended; raises
    Exception if that count exceeds the block size or the string length.
    """
    pad_count = ord(s[-1])
    if pad_count > blocksize or pad_count > len(s):
        raise Exception('invalid padding')
    return s[:-pad_count]
def AESdecryptCBC(data, key, iv=ZEROIV, padding=False):
if len(data) % 16:
print "AESdecryptCBC: data length not /16, truncating"
data = data[0:(len(data)/16) * 16]
data = AES.new(key, AES.MODE_CBC, iv).decrypt(data)
if padding:
return removePadding(16, data)
return data
def AESencryptCBC(data, key, iv=ZEROIV, padding=False):
if len(data) % 16:
print "AESdecryptCBC: data length not /16, truncating"
data = data[0:(len(data)/16) * 16]
data = AES.new(key, AES.MODE_CBC, iv).encrypt(data)
return data

View File

@@ -0,0 +1,70 @@
import struct
from Crypto.Cipher import AES
"""
http://www.ietf.org/rfc/rfc3394.txt
quick'n'dirty AES wrap implementation
used by iOS 4 KeyStore kernel extension for wrapping/unwrapping encryption keys
"""
def unpack64bit(s):
    """Decode an 8-byte big-endian string into an integer."""
    (value,) = struct.unpack(">Q", s)
    return value
def pack64bit(s):
    """Encode an integer as an 8-byte big-endian string."""
    packed = struct.pack(">Q", s)
    return packed
def AESUnwrap(kek, wrapped):
    """RFC 3394 AES key unwrap.

    kek is the key-encryption key, wrapped the wrapped key blob.
    Returns the unwrapped key, or None if the integrity check fails
    (i.e. the kek is wrong).
    """
    # split the blob into 64-bit registers: A (integrity value) + R[1..n]
    C = []
    for i in xrange(len(wrapped)/8):
        C.append(unpack64bit(wrapped[i*8:i*8+8]))
    n = len(C) - 1
    R = [0] * (n+1)
    A = C[0]
    for i in xrange(1,n+1):
        R[i] = C[i]
    # 6 rounds in reverse, per RFC 3394 section 2.2.2
    for j in reversed(xrange(0,6)):
        for i in reversed(xrange(1,n+1)):
            todec = pack64bit(A ^ (n*j+i))
            todec += pack64bit(R[i])
            B = AES.new(kek).decrypt(todec)
            A = unpack64bit(B[:8])
            R[i] = unpack64bit(B[8:])
    #assert A == 0xa6a6a6a6a6a6a6a6, "AESUnwrap: integrity check FAIL, wrong kek ?"
    # 0xa6a6a6a6a6a6a6a6 is the RFC 3394 default initial value
    if A != 0xa6a6a6a6a6a6a6a6:
        #print "AESUnwrap: integrity check FAIL, wrong kek ?"
        return None
    res = "".join(map(pack64bit, R[1:]))
    return res
def AESwrap(kek, data):
    """RFC 3394 AES key wrap: wrap `data` (a multiple of 8 bytes) with the
    key-encryption key `kek` and return the wrapped blob."""
    # A starts as the RFC 3394 default initial value
    A = 0xa6a6a6a6a6a6a6a6
    R = [0]
    for i in xrange(len(data)/8):
        R.append(unpack64bit(data[i*8:i*8+8]))
    n = len(R) - 1
    # 6 forward rounds, per RFC 3394 section 2.2.1
    for j in xrange(0,6):
        for i in xrange(1,n+1):
            B = AES.new(kek).encrypt(pack64bit(A) + pack64bit(R[i]))
            A = unpack64bit(B[:8]) ^ (n*j+i)
            R[i] = unpack64bit(B[8:])
    res = pack64bit(A) + "".join(map(pack64bit, R[1:]))
    return res
if __name__ == "__main__":
    # Self-test using the RFC 3394 section 4 test vectors.
    #format (kek, data, expected_ciphertext)
    test_vectors = [
        ("000102030405060708090A0B0C0D0E0F", "00112233445566778899AABBCCDDEEFF", "1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5"),
        ("000102030405060708090A0B0C0D0E0F1011121314151617", "00112233445566778899AABBCCDDEEFF", "96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D"),
        ("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F", "00112233445566778899AABBCCDDEEFF", "64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7"),
        ("000102030405060708090A0B0C0D0E0F1011121314151617", "00112233445566778899AABBCCDDEEFF0001020304050607", "031D33264E15D33268F24EC260743EDCE1C6C7DDEE725A936BA814915C6762D2"),
        ("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F", "00112233445566778899AABBCCDDEEFF0001020304050607", "A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1"),
        ("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F", "00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F", "28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21")
        ]
    # wrap must produce the expected ciphertext, and unwrap must round-trip
    for kek, data, expected in test_vectors:
        ciphertext = AESwrap(kek.decode("hex"), data.decode("hex"))
        assert ciphertext == expected.decode("hex")
        assert AESUnwrap(kek.decode("hex"), ciphertext) == data.decode("hex")
    print "All tests OK !"

View File

@@ -0,0 +1,74 @@
from Crypto.Util import number
# curve25519 prime field modulus and the (A-2)/4 ladder constant.
CURVE_P = (2**255 - 19)
CURVE_A = 121665

def curve25519_monty(x1, z1, x2, z2, qmqp):
    """One Montgomery-ladder step on curve25519.

    (x1:z1) is the point being doubled, (x2:z2) the point being added,
    and qmqp the x-coordinate of their difference.  Returns the doubled
    point followed by the sum: (x_double, z_double, x_sum, z_sum).
    """
    # differential addition
    cross1 = (x1 + z1) * (x2 - z2) % CURVE_P
    cross2 = (x1 - z1) * (x2 + z2) % CURVE_P
    sum_x = (cross1 + cross2) ** 2 % CURVE_P
    diff_sq = (cross1 - cross2) ** 2 % CURVE_P
    sum_z = diff_sq * qmqp % CURVE_P
    # doubling
    sq_add = (x1 + z1) ** 2 % CURVE_P
    sq_sub = (x1 - z1) ** 2 % CURVE_P
    dbl_x = sq_add * sq_sub % CURVE_P
    delta = (sq_add - sq_sub) % CURVE_P
    dbl_z = delta * ((sq_add + CURVE_A * delta) % CURVE_P) % CURVE_P
    return dbl_x, dbl_z, sum_x, sum_z
def curve25519_mult(n, q):
    """Montgomery-ladder scalar multiplication: compute n*Q for the point
    with affine x-coordinate q, returning projective (X, Z)."""
    # ladder state: (nqpqx:nqpqz) = (k+1)*Q, (nqx:nqz) = k*Q
    nqpqx, nqpqz = q, 1
    nqx, nqz = 1, 0
    # scan scalar bits from the top (bit 255) down
    for i in range(255, -1, -1):
        if (n >> i) & 1:
            nqpqx,nqpqz,nqx,nqz = curve25519_monty(nqpqx, nqpqz, nqx, nqz, q)
        else:
            nqx,nqz,nqpqx,nqpqz = curve25519_monty(nqx, nqz, nqpqx, nqpqz, q)
    return nqx, nqz
def curve25519(secret, basepoint):
    """curve25519 scalar multiplication.

    secret and basepoint are 32-byte little-endian strings; returns the
    32-byte little-endian x-coordinate of secret*basepoint.
    """
    # clamp the secret per the curve25519 spec: clear the low 3 bits,
    # clear the top bit, set bit 254
    a = ord(secret[0])
    a &= 248
    b = ord(secret[31])
    b &= 127
    b |= 64
    s = chr(a) + secret[1:-1] + chr(b)
    # inputs are little-endian; bytes_to_long expects big-endian
    s = number.bytes_to_long(s[::-1])
    basepoint = number.bytes_to_long(basepoint[::-1])
    x, z = curve25519_mult(s, basepoint)
    # convert projective (X:Z) to affine x = X * Z^-1 mod p
    zmone = number.inverse(z, CURVE_P)
    z = x * zmone % CURVE_P
    return number.long_to_bytes(z)[::-1]
if __name__ == "__main__":
    # Demo: derive a shared secret from a hardcoded key pair and unwrap a
    # key blob with the SHA256-derived KEK (expected values in comments).
    from crypto.aeswrap import AESUnwrap
    from Crypto.Hash import SHA256
    z="04000000080000000200000048000000000000000000000000000000000000000000000002917dc2542198edeb1078c4d1ebab74d9ca87890657ba02b9825dadf20a002f44360c6f87743fac0236df1f9eedbea801e31677aef3a09adfb4e10a37ae27facf419ab3ea3f39f4".decode("hex")
    mysecret = "99b66345829d8c05041eea1ba1ed5b2984c3e5ec7a756ef053473c7f22b49f14".decode("hex")
    mypublic = "b1c652786697a5feef36a56f36fde524a21193f4e563627977ab515f600fdb3a".decode("hex")
    # the peer's public key is embedded in the blob at offset 36
    hispublic = z[36:36+32]
    #c4d9fe462a2ebbf0745195ce7dc5e8b49947bbd5b42da74175d5f8125b44582b
    shared = curve25519(mysecret, hispublic)
    print shared.encode("hex")
    # KEK = SHA256(counter || shared || hispublic || mypublic)
    h = SHA256.new()
    h.update('\x00\x00\x00\x01')
    h.update(shared)
    h.update(hispublic)
    h.update(mypublic)
    md = h.digest()
    #e442c81b91ea876d3cf42d3aea75f4b0c3f90f9fd045e1f5784b91260f3bdc9c
    print AESUnwrap(md, z[32+36:]).encode("hex")

View File

@@ -0,0 +1,129 @@
#!/usr/bin/env python
from Crypto.Cipher import AES
from Crypto.Util import strxor
from struct import pack, unpack
def gcm_rightshift(vec):
    """Shift a 128-bit value (list of 16 byte ints) right by one bit.

    Modifies vec in place and also returns it; bits shifted out of one
    byte carry into the top bit of the next.
    """
    carry = 0
    for idx in range(16):
        current = vec[idx]
        vec[idx] = (current >> 1) | carry
        carry = (current & 1) << 7
    return vec
def gcm_gf_mult(a, b):
    """Multiply two 128-bit values in GF(2^128) as used by GHASH.

    a and b are lists of 16 byte ints (bit 0 = MSB of byte 0, per the
    GCM bit ordering); returns the product as a list of 16 ints.
    """
    mask = [ 0x80, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x01 ]
    # 0xe1 is the top byte of the GHASH reduction polynomial
    poly = [ 0x00, 0xe1 ]
    Z = [0] * 16
    V = [c for c in a]
    for x in range(128):
        # accumulate V whenever bit x of b is set
        if b[x >> 3] & mask[x & 7]:
            Z = [V[y] ^ Z[y] for y in range(16)]
        # shift V right, reducing modulo the polynomial when a bit falls off
        bit = V[15] & 1
        V = gcm_rightshift(V)
        V[0] ^= poly[bit]
    return Z
def ghash(h, auth_data, data):
    """GHASH over auth_data and data with hash subkey h (16-byte string).

    Both inputs are zero-padded to 16-byte blocks, followed by a block
    encoding their bit lengths, then folded with GF(2^128) multiplies.
    """
    u = (16 - len(data)) % 16
    v = (16 - len(auth_data)) % 16
    x = auth_data + chr(0) * v + data + chr(0) * u
    # final block: 64-bit bit-lengths of auth_data and data
    x += pack('>QQ', len(auth_data) * 8, len(data) * 8)
    y = [0] * 16
    vec_h = [ord(c) for c in h]
    for i in range(0, len(x), 16):
        block = [ord(c) for c in x[i:i+16]]
        y = [y[j] ^ block[j] for j in range(16)]
        y = gcm_gf_mult(y, vec_h)
    return ''.join(chr(c) for c in y)
def inc32(block):
    """Increment the low 32 bits of a 16-byte GCM counter block.

    The counter is big-endian in the last 4 bytes.  Per NIST SP 800-38D
    the increment is modulo 2**32.
    """
    counter, = unpack('>L', block[12:])
    # FIX: wrap modulo 2**32 — without the mask, pack() raises
    # struct.error once the counter reaches 0xffffffff.
    counter = (counter + 1) & 0xffffffff
    return block[:12] + pack('>L', counter)
def gctr(k, icb, plaintext):
    """GCM counter mode: XOR plaintext with the AES-CTR keystream.

    Note the counter is incremented *before* the first block, so icb
    itself is never used for data — it is reserved for the tag.
    """
    y = ''
    if len(plaintext) == 0:
        return y
    aes = AES.new(k)
    cb = icb
    for i in range(0, len(plaintext), aes.block_size):
        cb = inc32(cb)
        encrypted = aes.encrypt(cb)
        plaintext_block = plaintext[i:i+aes.block_size]
        # truncate the keystream for a short final block
        y += strxor.strxor(plaintext_block, encrypted[:len(plaintext_block)])
    return y
def gcm_decrypt(k, iv, encrypted, auth_data, tag):
    """AES-GCM decrypt and authenticate.

    Returns the plaintext, or raises ValueError if the computed tag does
    not match `tag`.
    """
    aes = AES.new(k)
    # hash subkey H = AES_k(0^128)
    h = aes.encrypt(chr(0) * aes.block_size)
    # 96-bit IVs use IV || 0^31 || 1; others are run through GHASH
    if len(iv) == 12:
        y0 = iv + "\x00\x00\x00\x01"
    else:
        y0 = ghash(h, '', iv)
    decrypted = gctr(k, y0, encrypted)
    s = ghash(h, auth_data, encrypted)
    t = aes.encrypt(y0)
    T = strxor.strxor(s, t)
    # NOTE(review): this comparison is not constant-time
    if T != tag:
        raise ValueError('Decrypted data is invalid')
    else:
        return decrypted
def gcm_encrypt(k, iv, plaintext, auth_data):
    """AES-GCM encrypt.  Returns (ciphertext, tag)."""
    aes = AES.new(k)
    # hash subkey H = AES_k(0^128)
    h = aes.encrypt(chr(0) * aes.block_size)
    # 96-bit IVs use IV || 0^31 || 1; others are run through GHASH
    if len(iv) == 12:
        y0 = iv + "\x00\x00\x00\x01"
    else:
        y0 = ghash(h, '', iv)
    encrypted = gctr(k, y0, plaintext)
    s = ghash(h, auth_data, encrypted)
    t = aes.encrypt(y0)
    T = strxor.strxor(s, t)
    return (encrypted, T)
def main():
    """Self-test using IEEE 802.1 (MACsec) GCM test vectors, then decrypt a
    sample key blob."""
    #http://www.ieee802.org/1/files/public/docs2011/bn-randall-test-vectors-0511-v1.pdf
    # vector 1: authentication only (empty plaintext)
    k = 'AD7A2BD03EAC835A6F620FDCB506B345'.decode("hex")
    p = ''
    a = 'D609B1F056637A0D46DF998D88E5222AB2C2846512153524C0895E8108000F101112131415161718191A1B1C1D1E1F202122232425262728292A2B2C2D2E2F30313233340001'.decode("hex")
    iv = '12153524C0895E81B2C28465'.decode("hex")
    c, t = gcm_encrypt(k, iv, '', a)
    assert c == ""
    assert t == "f09478a9b09007d06f46e9b6a1da25dd".decode("hex")
    # vector 2: encryption + authentication
    k = 'AD7A2BD03EAC835A6F620FDCB506B345'.decode("hex")
    p = '08000F101112131415161718191A1B1C1D1E1F202122232425262728292A2B2C2D2E2F303132333435363738393A0002'.decode("hex")
    a = 'D609B1F056637A0D46DF998D88E52E00B2C2846512153524C0895E81'.decode("hex")
    iv = '12153524C0895E81B2C28465'.decode("hex")
    c, t = gcm_encrypt(k, iv, p, a)
    assert c == '701AFA1CC039C0D765128A665DAB69243899BF7318CCDC81C9931DA17FBE8EDD7D17CB8B4C26FC81E3284F2B7FBA713D'.decode("hex")
    assert t == '4F8D55E7D3F06FD5A13C0C29B9D5B880'.decode("hex")
    # decrypt a sample escrow key blob: ciphertext sits after a 52-byte
    # header, the 16-byte tag is at the end
    key = "91bfb6cbcff07b93a4c68bbfe99ac63b713f0627025c0fb1ffc5b0812dc284f8".decode("hex")
    data = "020000000B00000028000000DE44D22E96B1966BAEF4CBEA8675871D40BA669401BD4EBB52AF9C025134187E70549012058456BF0EC0FA1F8FF9F822AC4312AB2141FA712E6D1482358EAC1421A1BFFA81EF38BD0BF2E52675D665EFE3C534E188F575774FAA92E74345575E370B9982661FAE8BD9243B7AD7D2105B275424C0CA1145B9D43AFF04F2747E40D62EC60563960D62A894BE66F267B14D75C0572BE60CC9B339D440FCB418D4F729BBF15C14E0D3A43E4A8B44523D8B3B0F3E7DF85AA67A707EE19CB893277D2392234D7DBC17DA4A0BD7F166189FC54C16C20D287E20FD2FB11BD2CE09ADBDABB95124CD4BFE219E34D3C80E69570A5A506555D7094916C5D75E0065F1796F556EDF0DAA1AA758E0C85AE3951BD363F26B1D43F6CBAEE12D97AD3B60CFA89C1C76BB29F2B54BE31B6CE166F4860C5E5DA92588EF53AA946DF159E60E6F05009D12FB1E37".decode("hex")
    ciphertext = data[12+40:-16]
    tag = data[-16:]
    print repr(gcm_decrypt(key, '', ciphertext, '', tag))

if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,156 @@
#!/usr/bin/env python
import sys, os
from PyQt4 import QtGui, QtCore
from backups.backup4 import MBDB
from keychain.keychain4 import Keychain4
from util.bplist import BPlistReader
from keystore.keybag import Keybag
from util import readPlist
class KeychainTreeWidget(QtGui.QTreeWidget):
    """Two-column tree widget listing keychain items; header hidden."""
    def __init__(self, parent=None):
        QtGui.QTreeWidget.__init__(self, parent)
        self.setGeometry(10, 10, 780, 380)
        self.header().hide()
        self.setColumnCount(2)
class KeychainTreeWidgetItem(QtGui.QTreeWidgetItem):
    """Tree item for an unlocked keychain entry: bold title, grey colors."""
    def __init__(self, title):
        QtGui.QTreeWidgetItem.__init__(self, [title])
        fnt = self.font(0)
        fnt.setBold(True)
        self.setFont(0, fnt)
        self.setColors()

    def setText(self, column, title):
        # NOTE(review): plain pass-through override — adds no behavior
        QtGui.QTreeWidgetItem.setText(self, column, title)

    def setColors(self):
        # grey-on-light-grey; overridden by subclasses to signal state
        self.setForeground(0, QtGui.QBrush(QtGui.QColor(80, 80, 80)))
        self.setBackground(0, QtGui.QBrush(QtGui.QColor(230, 230, 230)))
        self.setBackground(1, QtGui.QBrush(QtGui.QColor(230, 230, 230)))
class LockedKeychainTreeWidgetItem(KeychainTreeWidgetItem):
    """Tree item for an entry whose data could not be decrypted: red tint."""
    def setColors(self):
        self.setForeground(0, QtGui.QBrush(QtGui.QColor(255, 80, 80)))
        self.setBackground(0, QtGui.QBrush(QtGui.QColor(255, 230, 230)))
        self.setBackground(1, QtGui.QBrush(QtGui.QColor(255, 230, 230)))
class KeychainWindow(QtGui.QWidget):
    """Main window showing generic and internet keychain passwords in a tree.

    Password entries are dicts with at least the keychain attribute keys
    'acct', 'agrp' and 'data' (plus 'svce' for generic, 'srvr'/'port' for
    internet entries) — presumably produced by Keychain4; data is None
    when the item could not be decrypted.
    """
    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.setGeometry(100, 100, 800, 400)
        self.setWindowTitle('Keychain Explorer')
        self.passwordTree = KeychainTreeWidget(parent=self)

    def setGenericPasswords(self, pwds):
        """Populate the 'Generic Passwords' branch from a list of entries."""
        self.genericPasswords = pwds
        self.passwordItems = KeychainTreeWidgetItem('Generic Passwords')
        for pwd in self.genericPasswords:
            # skip entries with no account attribute at all
            if not pwd.has_key('acct'):
                continue
            if len(pwd['acct']) > 0:
                item_title = '%s (%s)' % (pwd['svce'], pwd['acct'])
            else:
                item_title = pwd['svce']
            # undecryptable entries are shown in red
            if pwd['data'] is None:
                item = LockedKeychainTreeWidgetItem(item_title)
            else:
                item = KeychainTreeWidgetItem(item_title)
            item.addChild(QtGui.QTreeWidgetItem(['Service', pwd['svce']]))
            item.addChild(QtGui.QTreeWidgetItem(['Account', pwd['acct']]))
            if pwd['data'] is not None:
                item.addChild(QtGui.QTreeWidgetItem(['Data', pwd['data']]))
            else:
                item.addChild(QtGui.QTreeWidgetItem(['Data', 'N/A']))
            item.addChild(QtGui.QTreeWidgetItem(['Access Group', pwd['agrp']]))
            self.passwordItems.addChild(item)
        self.passwordTree.addTopLevelItem(self.passwordItems)
        self.passwordTree.expandAll()
        self.passwordTree.resizeColumnToContents(0)

    def setInternetPasswords(self, pwds):
        """Populate the 'Internet Passwords' branch from a list of entries."""
        self.internetPasswords = pwds
        self.internetPasswordItems = KeychainTreeWidgetItem('Internet Passwords')
        for pwd in pwds:
            item_title = '%s (%s)' % (pwd['srvr'], pwd['acct'])
            # NOTE(review): unlike setGenericPasswords, locked entries are
            # not highlighted here — confirm whether that is intentional
            item = KeychainTreeWidgetItem(item_title)
            item.addChild(QtGui.QTreeWidgetItem(['Server', pwd['srvr']]))
            item.addChild(QtGui.QTreeWidgetItem(['Account', pwd['acct']]))
            if pwd['data'] is not None:
                item.addChild(QtGui.QTreeWidgetItem(['Data', pwd['data']]))
            else:
                item.addChild(QtGui.QTreeWidgetItem(['Data', 'N/A']))
            item.addChild(QtGui.QTreeWidgetItem(['Port', str(pwd['port'])]))
            item.addChild(QtGui.QTreeWidgetItem(['Access Group', pwd['agrp']]))
            self.internetPasswordItems.addChild(item)
        self.passwordTree.addTopLevelItem(self.internetPasswordItems)
        self.passwordTree.expandAll()
        self.passwordTree.resizeColumnToContents(0)
def warn(msg):
    # Lightweight warning helper; execution continues after a warning.
    print "WARNING: %s" % msg
def getBackupKeyBag(backupfolder, passphrase):
    """Load the BackupKeyBag from an iTunes backup and unlock it.

    Returns the unlocked Keybag, or None when the passphrase is wrong.
    """
    manifest = readPlist(backupfolder + "/Manifest.plist")
    kb = Keybag(manifest["BackupKeyBag"].data)
    if kb.unlockBackupKeybagWithPasscode(passphrase):
        print "BackupKeyBag unlock OK"
        return kb
    else:
        return None
def main():
    """Interactive demo: pick a backup folder, decrypt the keychain, show it."""
    app = QtGui.QApplication(sys.argv)
    # Default the file dialog to the Windows iTunes backup location.
    init_path = "{0:s}/Apple Computer/MobileSync/Backup".format(os.getenv('APPDATA'))
    dirname = QtGui.QFileDialog.getExistingDirectory(None, "Select iTunes backup directory", init_path)
    kb = getBackupKeyBag(dirname, 'pouet') #XXX: hardcoded password for demo
    if not kb:
        warn("Backup keybag unlock fail : wrong passcode?")
        return
    db = MBDB(dirname)
    db.keybag = kb
    # Pull the encrypted keychain plist out of the backup and stage it on disk.
    filename, record = db.get_file_by_name("keychain-backup.plist")
    keychain_data = db.read_file(filename, record)
    f = file('keychain.tmp', 'wb')
    f.write(keychain_data)
    f.close()
    kc = Keychain4('keychain.tmp', kb)
    pwds = kc.get_passwords()
    inet_pwds = kc.get_inet_passwords()
    qb = KeychainWindow()
    qb.setGenericPasswords(pwds)
    qb.setInternetPasswords(inet_pwds)
    qb.show()
    # Enter the Qt event loop; exec_() returns only on window close.
    sys.exit(app.exec_())
    pass
# Launch the viewer when run as a script.
if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,88 @@
import plistlib
import os
from keystore.keybag import Keybag
from keychain.keychain4 import Keychain4
from keychain.managedconfiguration import bruteforce_old_pass
from util.ramdiskclient import RamdiskToolClient
from util import write_file
def bf_system():
    """Recover the system keybag passcode key via a booted ramdisk.

    Talks to the on-device ramdisk tool, caches results per keybag UUID in
    the device-info plist, and on success downloads keychain-2.db.
    Flow: known passcode key -> reuse; user-entered passcode -> verify;
    simple (4-digit) passcode -> on-device bruteforce; complex passcode ->
    dictionary attack from wordlist.dict.
    """
    curdir = os.path.dirname(os.path.abspath(__file__))
    client = RamdiskToolClient()
    di = client.getDeviceInfos()
    devicedir = di["udid"]
    # Work inside a per-device directory named after the UDID.
    if os.getcwd().find(devicedir) == -1:
        try:
            os.mkdir(devicedir)
        except:
            pass
        os.chdir(devicedir)
    key835 = di.get("key835").decode("hex")  # UID-derived key 0x835
    systembag = client.getSystemKeyBag()
    kbkeys = systembag["KeyBagKeys"].data
    kb = Keybag.createWithDataSignBlob(kbkeys, key835)
    # keybags is a live view into di: mutations are persisted by di.save().
    keybags = di.setdefault("keybags", {})
    kbuuid = kb.uuid.encode("hex")
    print "Keybag UUID :", kbuuid
    if True and keybags.has_key(kbuuid) and keybags[kbuuid].has_key("passcodeKey"):
        # Cached result from a previous run: just unlock and show class keys.
        print "We've already seen this keybag"
        passcodeKey = keybags[kbuuid].get("passcodeKey").decode("hex")
        print kb.unlockWithPasscodeKey(passcodeKey)
        kb.printClassKeys()
    else:
        keybags[kbuuid] = {"KeyBagKeys": systembag["KeyBagKeys"]}
        di["KeyBagKeys"] = systembag["KeyBagKeys"]
        di.save()
        print "Enter passcode or leave blank for bruteforce:"
        z = raw_input()
        # Derive the passcode key on-device (it depends on the device UID).
        res = client.getPasscodeKey(systembag["KeyBagKeys"].data, z)
        if kb.unlockWithPasscodeKey(res.get("passcodeKey").decode("hex")):
            print "Passcode \"%s\" OK" % z
            di.update(res)
            keybags[kbuuid].update(res)
            di.save()
            keychain_blob = client.downloadFile("/mnt2/Keychains/keychain-2.db")
            write_file("keychain-2.db", keychain_blob)
            print "Downloaded keychain database, use keychain_tool.py to decrypt secrets"
            return
        if z != "":
            print "Wrong passcode, trying to bruteforce !"
        if kb.passcodeComplexity == 0:
            # Simple passcode: exhaustive 0000-9999 search on the device.
            print "Trying all 4-digits passcodes..."
            bf = client.bruteforceKeyBag(systembag["KeyBagKeys"].data)
            if bf:
                di.update(bf)
                keybags[kbuuid].update(bf)
                print bf
                print kb.unlockWithPasscodeKey(bf.get("passcodeKey").decode("hex"))
            kb.printClassKeys()
            di["classKeys"] = kb.getClearClassKeysDict()
            di.save()
        else:
            # Complex passcode: try each word from the wordlist.
            print "Complex passcode used, trying dictionary attack ..."
            dictfile = os.path.join(curdir, 'wordlist.dict')
            try:
                wordlist = open(dictfile, 'r').readlines()
            except (OSError, IOError), e:
                exit(e)
            for line in wordlist:
                res = client.getPasscodeKey(systembag["KeyBagKeys"].data, line.rstrip('\n'))
                if kb.unlockWithPasscodeKey(res.get("passcodeKey").decode("hex")):
                    print "Passcode \"%s\" OK" % line.rstrip('\n')
                    di.update(res)
                    keybags[kbuuid].update(res)
                    di.save()
                    keychain_blob = client.downloadFile("/mnt2/Keychains/keychain-2.db")
                    write_file("keychain-2.db", keychain_blob)
                    print "Downloaded keychain database, use keychain_tool.py to decrypt secrets"
                    return
            print "Passcode not found!"
            return
    #keychain_blob = client.downloadFile("/private/var/Keychains/keychain-2.db")
    keychain_blob = client.downloadFile("/mnt2/Keychains/keychain-2.db")
    write_file("keychain-2.db", keychain_blob)
    print "Downloaded keychain database, use keychain_tool.py to decrypt secrets"


bf_system()

View File

@@ -0,0 +1,38 @@
import os
import plistlib
from keystore.keybag import Keybag
from util.ramdiskclient import RamdiskToolClient
"""
this wont work on iOS 5 unless the passcode was already bruteforced
"""
def escrow():
client = RamdiskToolClient()
di = client.getDeviceInfos()
key835 = di.get("key835").decode("hex")
plist = os.environ["ALLUSERSPROFILE"] + "/Apple/Lockdown/%s.plist" % di["udid"]
lockdown = plistlib.readPlist(plist)
kb = Keybag.createWithDataSignBlob(lockdown["EscrowBag"].data, key835)
keybags = di.setdefault("keybags", {})
kbuuid = kb.uuid.encode("hex")
if not keybags.has_key(kbuuid):
print lockdown["HostID"]
res = client.getEscrowRecord(lockdown["HostID"])
bagkey = res.get("BagKey")
print "Bag key" + bagkey.data.encode("hex")
res = client.getPasscodeKey(lockdown["EscrowBag"].data, bagkey)
print res
passcodeKey = res["passcodeKey"].decode("hex")
keybags[kbuuid] = {"KeyBagKeys": lockdown["EscrowBag"],
"passcode": bagkey,
"passcodeKey": passcodeKey.encode("hex")}
pl.update(keybags[kbuuid])
else:
passcodeKey = keybags[kbuuid].get("passcodeKey").decode("hex")
print kb.unlockWithPasscodeKey(passcodeKey)
kb.printClassKeys()
escrow()

View File

@@ -0,0 +1,34 @@
from optparse import OptionParser
from hfs.emf import EMFVolume
from util.bdev import FileBlockDevice
import plistlib
def main():
    """CLI entry point: decrypt all protected files of an EMF disk image in place.

    Usage: emf_decrypter.py disk_image.bin [device_infos.plist]
    --nowrite runs the whole pass without modifying the image (dry run).
    """
    parser = OptionParser(usage="emf_decrypter.py disk_image.bin")
    parser.add_option("-w", "--nowrite", dest="write", action="store_false", default=True,
                      help="disable modifications of input file, for testing")
    (options, args) = parser.parse_args()
    if len(args) < 1:
        parser.print_help()
        return
    # Optional second argument: plist with the volume's keys; otherwise
    # EMFVolume searches for a matching keyfile next to the image.
    device_infos = None
    if len(args) >= 2: device_infos = plistlib.readPlist(args[1])
    p = FileBlockDevice(args[0], 0, options.write)
    v = EMFVolume(p, device_infos)
    if not v.keybag.unlocked:
        # Without the passcode key, per-file keys cannot be unwrapped.
        print "Keybag locked, protected files won't be decrypted, continue anyway ?"
        if raw_input() == "n":
            return
    if options.write:
        print "WARNING ! This tool will modify the hfs image and possibly wreck it if something goes wrong !"
        print "Make sure to backup the image before proceeding"
        print "You can use the --nowrite option to do a dry run instead"
    else:
        print "Test mode : the input file will not be modified"
    print "Press a key to continue or CTRL-C to abort"
    raw_input()
    v.decryptAllFiles()

if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,25 @@
import os
import sys
from hfs.emf import EMFVolume
from hfs.journal import do_emf_carving
from util.bdev import FileBlockDevice
if __name__ == "__main__":
    # Carve deleted files out of an EMF-encrypted HFS+ image.
    # Recovered files go to <volumeID>_<image>/undelete/, rejects to /junk/.
    if len(sys.argv) < 2:
        print "Usage: emf_undelete.py disk_image.bin"
        sys.exit(0)
    filename = sys.argv[1]
    # No device_infos given: EMFVolume searches for a keyfile next to the image.
    volume = EMFVolume(FileBlockDevice(filename), None)
    dirname = os.path.dirname(filename)
    if dirname == "":
        dirname = "."
    # Output folder keyed by volume ID so several images can coexist.
    outdir = dirname + "/" + volume.volumeID().encode("hex") + "_" + os.path.basename(filename)
    carveokdir = outdir + "/undelete/"
    carvenokdir = outdir + "/junk/"
    try:
        os.makedirs(carveokdir)
        os.makedirs(carvenokdir)
    except:
        pass  # directories may already exist from a previous run
    do_emf_carving(volume, carveokdir, carvenokdir)

View File

@@ -0,0 +1,12 @@
from construct.core import Struct
from construct.macros import *
# IMG2 container header (little-endian). Describes where the IMG3 images
# live inside the flash image: offsets/lengths are in block_size units.
IMG2 = Struct("IMG2",
    String("magic", 4),           # expected 'Img2' fourcc (byte-reversed on disk)
    ULInt32("block_size"),
    ULInt32("images_offset"),
    ULInt32("images_block"),
    ULInt32("images_length"),
    Padding(0x1C),
    ULInt32("crc32"),
)

View File

@@ -0,0 +1,292 @@
from crypto.aes import AESdecryptCBC
from util import read_file, write_file
from util.ramdiskclient import RamdiskToolClient
import M2Crypto
import struct
import hashlib
import os
import sys
def decryptGID(data):
    """AES-decrypt data with the device GID key via the ramdisk tool.

    Returns the decrypted bytes, or None when no device is reachable or
    the response carries no data.
    """
    try:
        client = RamdiskToolClient.get()
    except:
        # No connected device / ramdisk: GID operations are impossible.
        return None
    r = client.aesGID(data)
    if r and r.has_key("data"):
        return r.data.data
    return None
def decryptPseudoGID(data):
    """Decrypt with a fixed 'pseudo GID' AES key instead of real hardware.
    NOTE(review): presumably only useful for simulated/known images — the
    real GID key never leaves the device; confirm intended use.
    """
    pseudogid = "5F650295E1FFFC97CE77ABD49DD955B3".decode("hex")
    return AESdecryptCBC(data, pseudogid, padding=False)
def dword(s, i):
    """Return the little-endian unsigned 32-bit integer at offset i of s."""
    (value,) = struct.unpack("<L", s[i:i + 4])
    return value
def extract_img3s(blob):
    """Split a blob of back-to-back IMG3 containers into Img3 objects.

    Stops at the first position that does not carry the '3gmI' magic
    (the 'Img3' fourcc stored little-endian). Each container's declared
    full length is used to find the next one.
    """
    i = 0
    res = []
    while i < len(blob):
        if blob[i:i+4] != "3gmI":
            break
        TYPE = blob[i+16:i+20][::-1]  # fourcc is byte-reversed on disk
        l = struct.unpack("<L", blob[i+4:i+8])[0]  # fullSize field
        data = blob[i:i+l]
        img3 = Img3(TYPE, data)
        res.append(img3)
        i += l
    return res
class Img3:
    """Parser for Apple IMG3 firmware containers.

    Handles tag parsing, certificate chain extraction, RSA signature
    verification (SHSH) and DATA payload decryption via the KBAG key
    (unwrapped with the device GID key).
    """

    # Tags whose payload is a single little-endian uint32.
    INT_FIELDS = ["SEPO", "SDOM", "BORD", "CHIP", "PROD"]
    # Apple root CA certificate, shared by all instances (see setRootCert).
    rootCert = None

    def __init__(self, filename, data=None):
        """Parse an IMG3 from a file, or from `data` (filename then only names it)."""
        self.filename = filename
        self.shortname = os.path.basename(filename)
        self.certs = None
        if not data:
            img3 = read_file(filename)
        else:
            img3 = data
        self.img3 = img3
        self.ecidoffset = 0
        if img3[0:4] != '3gmI':  # 'Img3' fourcc, little-endian on disk
            print "Magic 3gmI not found in " + filename
            return
        fullSize = dword(img3, 4)
        sizeNoPack = dword(img3, 8)
        sigCheckArea = dword(img3, 12)
        self.sha1 = hashlib.sha1(img3)
        # Hash over the signed area: from sigCheckArea field through the tags.
        self.fileHash = hashlib.sha1(img3[12:20+sigCheckArea])
        i = 20
        sections = {}
        # Walk the TLV-style tag list: fourcc, total_length, data_length.
        while i < fullSize:
            tag = img3[i:i+4][::-1] #reverse fourcc tag
            total_length = dword(img3, i+4)
            data_length = dword(img3, i+8)
            if tag == "DATA":
                # Keep the padding for DATA (needed for block decryption);
                # real payload length is remembered in self.datalen.
                self.datalen = data_length
                data = img3[i+12:i+total_length]
            else:
                data = img3[i+12:i+12+data_length]
            if tag in Img3.INT_FIELDS:
                data = struct.unpack("<L", data)[0]
            elif tag == "VERS":
                data = data[4:]  # skip the length prefix
            elif tag == "TYPE":
                data = data[::-1]
            elif tag == "ECID":
                self.ecidoffset = i  # remembered for ticketHash()
            #print "%s offset=%x len=%x" % (tag,i, data_length)
            # Only keep KBAGs with cryptState == 1 (production GID key).
            if tag != "KBAG" or dword(data,0) == 1:
                sections[tag] = data
            i += total_length
        self.sections = sections
        self.leaf_cert = None
        self.sig = None
        self.key = ""
        self.iv = ""
        self.extractCertificates()
        #self.sigcheck()

    def isEncrypted(self):
        # A KBAG section implies the DATA payload is AES-encrypted.
        return self.sections.has_key("KBAG")

    @staticmethod
    def setRootCert(filename):
        """Load the (DER) root CA used to anchor sigcheck()'s chain walk."""
        try:
            Img3.rootCert = M2Crypto.X509.load_cert_der_string(open(filename,"rb").read())
        except:
            print "IMG3.setRootCert failed loading %s" % filename

    def extractCertificates(self):
        """Parse the CERT section into {subject: X509}, keeping the leaf last."""
        if not self.sections.has_key("CERT"):
            return
        certs = {}
        i = 0
        while i < len(self.sections["CERT"]):
            data = self.sections["CERT"][i:]
            cert = M2Crypto.X509.load_cert_der_string(data)
            name = cert.get_subject().as_text()
            #name = name[name.find("CN=")+3:]
            #print name
            certs[name] = cert
            i += len(cert.as_der())
        #XXX nested Img3 in leaf cert 1.2.840.113635.100.6.1.1
        #CFTypeRef kSecOIDAPPLE_EXTENSION_APPLE_SIGNING = CFSTR("1.2.840.113635.100.6.1.1");
        # The leaf certificate may embed a nested IMG3 in an Apple extension;
        # merge its sections into ours when present.
        z = data.find("3gmI")
        if z != -1:
            zz = Img3("cert", data[z:])
            self.sections.update(zz.sections)
        #assume leaf cert is last
        self.certs = certs
        self.leaf_cert = cert
        self.leaf_name = name

    def writeCerts(self):
        """Dump every extracted certificate to <subject>_<sha1>.crt files."""
        if not self.certs:
            self.extractCertificates()
        for key, cert in self.certs.items():
            cert_data = cert.as_der()
            cert_sha1 = hashlib.sha1(cert_data).hexdigest()
            write_file("%s_%s.crt" % (key, cert_sha1), cert_data)

    """
    Decrypt SHSH section with leaf certificate public key
    output should be the SHA1 of img3[12:20+sigCheckArea]
    """
    def sigcheck(self, k89A=None):
        """Verify the IMG3 signature. k89A: optional AES key used to first
        decrypt a personalized (encrypted) SHSH blob. Returns True/False."""
        if not self.sections.has_key("SHSH"):
            print "[x] FAIL sigcheck %s : no SHSH section" % self.shortname
            return False
        if not self.leaf_cert:
            #print "Extracting certificates"
            self.extractCertificates()
        cert = self.leaf_cert
        #print "Leaf cert subject: %s" % cert.get_subject()
        certChainOk = False
        # Walk up the chain until the issuer is not among the embedded certs,
        # then try to anchor at the configured root cert.
        while True:
            issuer = cert.get_issuer().as_text()
            #print "issuer: %s" % issuer
            if not self.certs.has_key(issuer):
                if not Img3.rootCert:
                    print "Cert chain stops at %s" % issuer
                    certChainOk = False
                    break
                #print "Verifying cert.",
                certChainOk = cert.verify(Img3.rootCert.get_pubkey())
                break
            issuer = self.certs[issuer]
            if not cert.verify(issuer.get_pubkey()):
                print "%s is not signed by %s (verify fail)" % (cert.get_subject().as_text(), issuer.get_subject().as_text())
                return False
            cert = issuer
        shsh = self.sections["SHSH"]
        print "Got SHSH"
        try:
            # Raw RSA "decrypt" of the signature with the leaf public key.
            sig = self.leaf_cert.get_pubkey().get_rsa().public_decrypt(shsh, M2Crypto.RSA.pkcs1_padding)
        except:
            if k89A == None:
                print "SHSH RSA decrypt FAIL, IMG3 must be personalized (SHSH encrypted with k89A)"
                return False
            try:
                # Personalized image: SHSH is AES-encrypted with key 0x89A.
                shsh = AESdecryptCBC(shsh, k89A)
                sig = self.leaf_cert.get_pubkey().get_rsa().public_decrypt(shsh, M2Crypto.RSA.pkcs1_padding)
            except:
                raise
                return False
        #DigestInfo SHA1 http://www.ietf.org/rfc/rfc3447.txt
        sha1_digestInfo = "3021300906052b0e03021a05000414".decode("hex")
        if sig[:len(sha1_digestInfo)] == sha1_digestInfo:
            pass#print "DigestInfo SHA1 OK"
        self.sig = sig = sig[len(sha1_digestInfo):]
        ok = sig == self.fileHash.digest()
        if ok:
            print "%s : signature check OK (%s)" % (self.shortname, self.leaf_name)
        else:
            print "Signature check for %s failed" % self.shortname
            print "Decrypted SHA1 " + sig.encode("hex")
            print "Sigcheck area SHA1 " + self.fileHash.hexdigest()
        return ok

    def ticketHash(self):
        """SHA1 used for APTicket matching: header through the ECID tag."""
        #sigchecklen = struct.unpack("<L", self.img3[12:16])[0]
        tohash = struct.pack("<L", self.ecidoffset - 20) + self.img3[16:12 + self.ecidoffset - 20+8]
        return hashlib.sha1(tohash).digest()

    def setIvAndKey(self, iv, key):
        # Inject known IV/key (skips KBAG/GID decryption).
        self.iv = iv
        self.key = key

    def decryptKBAG(self):
        """Unwrap the DATA key: GID-decrypt the KBAG into self.iv/self.key.
        Returns True on success, False when GID decryption is unavailable."""
        if self.iv and self.key:
            print "No need to decrypt KBAG"
            return
        if not self.sections.has_key("KBAG"):
            print "FAIL: decrypt_kbag no KBAG section for %s" % self.filename
            return
        kbag = self.sections["KBAG"]
        cryptState = dword(kbag,0)
        if cryptState != 1:
            print "FAIL: cryptState = %d" % cryptState
        aesType = dword(kbag,4)
        if aesType != 128 and aesType != 192 and aesType != 256:
            print "FAIL: aesType = %d" % aesType
        keySize = aesType / 8
        #print "KBAG keySize = " + str(keySize)
        #print "KBAG = %s" % kbag.encode("hex")
        #kbag_dec = decryptPseudoGID(kbag[8:8+16+keySize])
        # KBAG layout after the two uint32s: 16-byte IV then wrapped key.
        kbag_dec = decryptGID(kbag[8:8+16+keySize])
        if not kbag_dec:
            return False
        self.iv = kbag_dec[:16]
        self.key = kbag_dec[16:]
        return True

    def isValidDecryptedData(self, data):
        """Heuristic payload sniffing; returns a type label or None."""
        if len(data) > 16 and data.startswith("complzss"):
            return "kernel"
        if len(data) > 0x800 and data[0x400:0x400+2] == "H+":
            return "ramdisk"
        if len(data) > 0x300 and data[0x280:0x285] == "iBoot":
            return "bootloader";
        if data.find("serial-number") != -1:
            return "devicetree"
        if data.startswith("iBootIm"):
            return "bootlogo"

    def getRawData(self):
        # DATA payload without the container padding.
        return self.sections["DATA"][:self.datalen]

    def decryptData(self, key=None, iv=None):
        """Return the decrypted payload (or raw payload when unencrypted).
        Falls back to the KBAG-derived key/iv when none are supplied;
        returns False when the result fails the payload heuristics."""
        if not self.sections.has_key("KBAG"):
            return self.getRawData()
        if not key or not iv:
            if not self.decryptKBAG():
                return
            key = self.key
            iv = self.iv
        data = AESdecryptCBC(self.sections["DATA"], key, iv)
        x = self.isValidDecryptedData(data)
        if not x:
            print >> sys.stderr, "%s : Decrypted data seems invalid" % self.shortname
            print >> sys.stderr, data[:50].encode("hex")
            return False
        print "%s : decrypted OK (%s)" % (self.shortname, x)
        return data[:self.datalen]
if __name__ == "__main__":
img3 = Img3(sys.argv[1])
img3.sigcheck()

View File

@@ -0,0 +1,27 @@
from construct.core import Struct
from construct.macros import *
from construct import RepeatUntil, OneOf
from util import hexdump
# One SCFG entry: 4-byte tag (byte-reversed on disk) + 16 bytes of
# NUL-padded data.
SCFGItem = Struct("SCFGItem",
    String("tag", 4),
    String("data", 16, padchar="\x00")
)

# SCFG header (0x18 bytes): 'SCFG' magic stored byte-reversed as 'gfCS',
# total length, then four fields of unknown meaning.
SCFG = Struct("SCFG",
    OneOf(String("magic", 4), ["gfCS"]),
    ULInt32("length"),
    ULInt32("unk1"),
    ULInt32("unk2"),
    ULInt32("unk3"),
    ULInt32("unk4")
)
def parse_SCFG(data):
    """Parse an SCFG blob into {tag: data}, skipping empty (0xFF) slots.

    Entries are 20 bytes each and start right after the 0x18-byte header;
    tags are stored byte-reversed on disk and are un-reversed here.
    """
    res = {}
    scfg = SCFG.parse(data)
    assert scfg.length > 0x18  # must contain at least the header
    for i in Array((scfg.length - 0x18) / 20, SCFGItem).parse(data[0x18:scfg.length]):
        if i.tag != "\xFF\xFF\xFF\xFF":  # erased-flash filler entry
            res[str(i.tag)[::-1]] = str(i.data)
    return res

View File

@@ -0,0 +1,268 @@
from structs import *
"""
Probably buggy
HAX, only works on case SENSITIVE
"""
class BTree(object):
def __init__(self, file, keyStruct, dataStruct):
self.file = file
self.keyStruct = keyStruct
self.dataStruct = dataStruct
block0 = self.file.readBlock(0)
btnode = BTNodeDescriptor.parse(block0)
assert btnode.kind == kBTHeaderNode
self.header = BTHeaderRec.parse(block0[BTNodeDescriptor.sizeof():])
assert self.header.keyCompareType == 0 or self.header.keyCompareType == 0 or kHFSBinaryCompare
#TODO: do more testing when nodeSize != blockSize
self.nodeSize = self.header.nodeSize
self.nodesInBlock = file.blockSize / self.header.nodeSize
self.blocksForNode = self.header.nodeSize / file.blockSize
#print file.blockSize , self.header.nodeSize
self.lastRecordNumber = 0
type, (hdr, maprec) = self.readBtreeNode(0)
assert len(maprec) == self.nodeSize - 256
if self.header.totalNodes / 8 > len(maprec):
pass #TODO: handle map records
self.maprec = maprec
def isNodeInUse(self, nodeNumber):
thisByte = ord(self.maprec[nodeNumber / 8])
return (thisByte & (1 << (7 - (nodeNumber % 8)))) != 0
def readEmptySpace(self):
res = ""
z = 0
for i in xrange(self.header.totalNodes):
if not self.isNodeInUse(i):
z += 1
res += self.readNode(i)
assert z == self.header.freeNodes
return res
#convert construct structure to tuple
def getComparableKey(self, k):
raise Exception("implement in subclass")
def compareKeys(self, k1, k2):
k2 = self.getComparableKey(k2)
if k1 == k2:
return 0
return -1 if k1 < k2 else 1
def printLeaf(self, key, data):
print key, data
def readNode(self, nodeNumber):
node = ""
for i in xrange(self.blocksForNode):
node += self.file.readBlock(nodeNumber * self.blocksForNode + i)
return node
def readBtreeNode(self, nodeNumber):
self.lastnodeNumber = nodeNumber
node = self.readNode(nodeNumber)
self.lastbtnode = btnode = BTNodeDescriptor.parse(node)
if btnode.kind == kBTHeaderNode:
assert btnode.numRecords == 3
end = self.nodeSize - 8 #2*4
offsets = Array(btnode.numRecords+1, UBInt16("off")).parse(node[end:])
assert offsets[-4] == end
hdr = BTHeaderRec.parse(node[BTNodeDescriptor.sizeof():])
maprec = node[offsets[-3]:end]
return kBTHeaderNode, [hdr, maprec]
elif btnode.kind == kBTIndexNode:
recs = []
offsets = Array(btnode.numRecords, UBInt16("off")).parse(node[-2*btnode.numRecords:])
for i in xrange(btnode.numRecords):
off = offsets[btnode.numRecords-i-1]
k = self.keyStruct.parse(node[off:])
off += 2 + k.keyLength
k.childNode = UBInt32("nodeNumber").parse(node[off:off+4])
recs.append(k)
return kBTIndexNode, recs
elif btnode.kind == kBTLeafNode:
recs = []
offsets = Array(btnode.numRecords, UBInt16("off")).parse(node[-2*btnode.numRecords:])
for i in xrange(btnode.numRecords):
off = offsets[btnode.numRecords-i-1]
k = self.keyStruct.parse(node[off:])
off += 2 + k.keyLength
d = self.dataStruct.parse(node[off:])
recs.append((k,d))
return kBTLeafNode, recs
else:
raise Exception("Invalid node type " + str(btnode))
def search(self, searchKey, node=None):
if node == None:
node = self.header.rootNode
type, stuff = self.readBtreeNode(node)
if len(stuff) == 0:
return None, None
if type == kBTIndexNode:
for i in xrange(len(stuff)):
if self.compareKeys(searchKey, stuff[i]) < 0:
if i > 0:
i = i - 1
return self.search(searchKey, stuff[i].childNode)
return self.search(searchKey, stuff[len(stuff)-1].childNode)
elif type == kBTLeafNode:
self.lastRecordNumber = 0
for k,v in stuff:
res = self.compareKeys(searchKey, k)
if res == 0:
return k, v
if res < 0:
return None, None
self.lastRecordNumber += 1
return None, None
def traverse(self, node=None, count=0, callback=None):
if node == None:
node = self.header.rootNode
type, stuff = self.readBtreeNode(node)
if type == kBTIndexNode:
for i in xrange(len(stuff)):
count += self.traverse(stuff[i].childNode, callback=callback)
elif type == kBTLeafNode:
for k,v in stuff:
if callback:
callback(k,v)
else:
self.printLeaf(k, v)
count += 1
return count
def traverseLeafNodes(self, callback=None):
nodeNumber = self.header.firstLeafNode
count = 0
while nodeNumber != 0:
_, stuff = self.readBtreeNode(nodeNumber)
count += len(stuff)
for k,v in stuff:
if callback:
callback(k,v)
else:
self.printLeaf(k, v)
nodeNumber = self.lastbtnode.fLink
return count
#XXX
def searchMultiple(self, searchKey, filterKeyFunction=lambda x:False):
self.search(searchKey)
nodeNumber = self.lastnodeNumber
recordNumber = self.lastRecordNumber
kv = []
while nodeNumber != 0:
_, stuff = self.readBtreeNode(nodeNumber)
for k,v in stuff[recordNumber:]:
if filterKeyFunction(k):
kv.append((k,v))
else:
return kv
nodeNumber = self.lastbtnode.fLink
recordNumber = 0
return kv
def getLBAsHax(self):
nodes = [self.lastnodeNumber]
n = self.lastbtnode
for i in xrange(2):
nodes.append(self.lastbtnode.bLink)
self.readBtreeNode(self.lastbtnode.bLink)
self.lastbtnode = n
for i in xrange(2):
nodes.append(self.lastbtnode.fLink)
self.readBtreeNode(self.lastbtnode.fLink)
res = []
for n in nodes:
res.append(self.file.getLBAforBlock(n * self.blocksForNode))
return res
class CatalogTree(BTree):
    """HFS+ catalog B-tree: maps (parentCNID, name) to file/folder records."""

    def __init__(self, file, volume):
        super(CatalogTree,self).__init__(file, HFSPlusCatalogKey, HFSPlusCatalogData)
        # Volume backref is needed to resolve symlink targets.
        self.volume = volume

    def printLeaf(self, k, d):
        # Only show real files/folders, not thread records.
        if d.recordType == kHFSPlusFolderRecord or d.recordType == kHFSPlusFileRecord:
            print getString(k)

    def getComparableKey(self, k2):
        #XXX http://dubeiko.com/development/FileSystems/HFSPLUS/tn1150.html#StringComparisonAlgorithm
        # Binary (case sensitive) comparison only — no HFS+ case folding.
        return (k2.parentID, getString(k2))

    def searchByCNID(self, cnid):
        """Resolve a CNID via its thread record, then fetch the real record."""
        threadk, threadd = self.search((cnid, ""))
        return self.search((threadd.data.parentID, getString(threadd.data))) if threadd else (None, None)

    def getFolderContents(self, cnid):
        # All records whose key has this folder as parent.
        return self.searchMultiple((cnid, ""), lambda k:k.parentID == cnid)

    def getRecordFromPath(self, path):
        """Walk an absolute POSIX path, following symlinks; returns (k, v)
        or (None, None) when a component is missing."""
        if not path.startswith("/"):
            return None, None
        if path == "/":
            return self.searchByCNID(kHFSRootFolderID)
        parentId=kHFSRootFolderID
        i = 1
        k, v = None, None
        for p in path.split("/")[1:]:
            if p == "":
                break  # trailing slash
            k,v = self.search((parentId, p))
            if (k,v) == (None, None):
                return None, None
            if v.recordType == kHFSPlusFolderRecord:
                parentId = v.data.folderID
            elif v.recordType == kHFSPlusFileRecord and is_symlink(v.data):
                # Splice the link target into the path and restart.
                linkdata = self.volume.readFileByRecord(v)
                print "symlink %s => %s" % (p, linkdata)
                if not linkdata:
                    return None, None
                t = path.split("/")
                t[i] = linkdata
                newpath = "/".join(t)
                return self.getRecordFromPath(newpath)
            else:
                break  # plain file before the last component: stop here
            i += 1
        return k,v
class ExtentsOverflowTree(BTree):
    """HFS+ extents overflow B-tree: extra extents beyond a fork's first 8."""

    def __init__(self, file):
        super(ExtentsOverflowTree,self).__init__(file, HFSPlusExtentKey, HFSPlusExtentRecord)

    def getComparableKey(self, k2):
        # Keys sort by (file, fork type, starting allocation block).
        return (k2.fileID, k2.forkType, k2.startBlock)

    def searchExtents(self, fileID, forkType, startBlock):
        """Return the extent record starting at startBlock, or (None, None)."""
        return self.search((fileID, forkType, startBlock))
class AttributesTree(BTree):
    """HFS+ attributes B-tree: extended attributes keyed by (fileID, name)."""

    def __init__(self, file):
        super(AttributesTree,self).__init__(file, HFSPlusAttrKey, HFSPlusAttrData)

    def printLeaf(self, k, d):
        print k.fileID, getString(k), d.data.encode("hex")

    def getComparableKey(self, k2):
        return (k2.fileID, getString(k2))

    def searchXattr(self, fileID, name):
        """Return the xattr's raw data, or None when absent."""
        k,v = self.search((fileID, name))
        return v.data if v else None

    def getAllXattrs(self, fileID):
        """Return {name: data} for every xattr attached to fileID."""
        res = {}
        for k,v in self.searchMultiple((fileID, ""), lambda k:k.fileID == fileID):
            res[getString(k)] = v.data
        return res

View File

@@ -0,0 +1,220 @@
from construct import Struct, ULInt16, ULInt32, String
from construct.macros import ULInt64, Padding, If
from crypto.aes import AESencryptCBC, AESdecryptCBC
from hfs import HFSVolume, HFSFile
from keystore.keybag import Keybag
from structs import HFSPlusVolumeHeader, kHFSPlusFileRecord, getString, \
kHFSRootParentID
from util import search_plist
from util.bruteforce import loadKeybagFromVolume
import hashlib
import os
import plistlib
import struct
"""
iOS >= 4 raw images
http://opensource.apple.com/source/xnu/xnu-1699.22.73/bsd/hfs/hfs_cprotect.c
http://opensource.apple.com/source/xnu/xnu-1699.22.73/bsd/sys/cprotect.h
"""
# Root com.apple.system.cprotect xattr: records the content-protection
# scheme version used by the volume (2 = iOS 4, 4 = iOS 5).
cp_root_xattr = Struct("cp_root_xattr",
    ULInt16("major_version"),
    ULInt16("minor_version"),
    ULInt64("flags"),
    ULInt32("reserved1"),
    ULInt32("reserved2"),
    ULInt32("reserved3"),
    ULInt32("reserved4")
)

# Per-file com.apple.system.cprotect xattr: protection class plus the
# class-key-wrapped file key (preceded by 20 reserved bytes on v4+).
cprotect_xattr = Struct("cprotect_xattr",
    ULInt16("xattr_major_version"),
    ULInt16("xattr_minor_version"),
    ULInt32("flags"),
    ULInt32("persistent_class"),
    ULInt32("key_size"),
    If(lambda ctx: ctx["xattr_major_version"] >= 4, Padding(20)),
    String("persistent_key", length=lambda ctx: ctx["key_size"])
)

# Protection class meaning "no per-file protection" (EMF key only).
NSProtectionNone = 4

# Data-protection class numbers -> API constant names.
PROTECTION_CLASSES={
    1:"NSFileProtectionComplete",
    2:"NSFileProtectionCompleteUnlessOpen",
    3:"NSFileProtectionCompleteUntilFirstUserAuthentication",
    4:"NSFileProtectionNone",
    5:"NSFileProtectionRecovery?"
}

#HAX: flags set in finderInfo[3] to tell if the image was already decrypted
FLAG_DECRYPTING = 0x454d4664 #EMFd big endian
FLAG_DECRYPTED = 0x454d4644 #EMFD big endian
class EMFFile(HFSFile):
    """An HFS+ file encrypted with the EMF scheme.

    Each block is first re-encrypted with the volume EMF key (undoing the
    transparent whole-disk layer) then decrypted with the per-file key.
    On cprotect v4 (iOS 5), 4KB sub-chunks use per-offset IVs derived from
    SHA1(filekey).
    """

    def __init__(self, volume, hfsplusfork, fileID, filekey, deleted=False):
        super(EMFFile,self).__init__(volume, hfsplusfork, fileID, deleted)
        self.filekey = filekey
        self.ivkey = None
        # Running logical offset used to derive per-chunk IVs (v4 only).
        self.decrypt_offset = 0
        if volume.cp_major_version == 4:
            self.ivkey = hashlib.sha1(filekey).digest()[:16]

    def processBlock(self, block, lba):
        """Undo EMF-layer encryption, then apply the file-key decryption."""
        iv = self.volume.ivForLBA(lba)
        # Re-apply the EMF key to get back the on-flash ciphertext.
        ciphertext = AESencryptCBC(block, self.volume.emfkey, iv)
        if not self.ivkey:
            # v2 (iOS 4): one CBC pass per block with the LBA-derived IV.
            clear = AESdecryptCBC(ciphertext, self.filekey, iv)
        else:
            # v4 (iOS 5): 0x1000-byte chunks, IV derived from file offset.
            clear = ""
            for i in xrange(len(block)/0x1000):
                iv = self.volume.ivForLBA(self.decrypt_offset, False)
                iv = AESencryptCBC(iv, self.ivkey)
                clear += AESdecryptCBC(ciphertext[i*0x1000:(i+1)*0x1000], self.filekey,iv)
                self.decrypt_offset += 0x1000
        return clear

    def decryptFile(self):
        """Decrypt every allocated block of the file in place on the volume."""
        self.decrypt_offset = 0
        bs = self.volume.blockSize
        for extent in self.extents:
            for i in xrange(extent.blockCount):
                lba = extent.startBlock+i
                data = self.volume.readBlock(lba)
                if len(data) == bs:
                    clear = self.processBlock(data, lba)
                    self.volume.writeBlock(lba, clear)
class EMFVolume(HFSVolume):
    """HFS+ data volume protected by the iOS EMF whole-disk encryption.

    Needs a device-info plist (EMF key, data partition offset, keybag) to
    unwrap per-file keys; finds one automatically next to the image when
    device_infos is None.
    """

    def __init__(self, bdev, device_infos, **kwargs):
        super(EMFVolume,self).__init__(bdev, **kwargs)
        volumeid = self.volumeID().encode("hex")
        if not device_infos:
            # Look for a plist whose dataVolumeUUID matches this volume.
            dirname = os.path.dirname(bdev.filename)
            device_infos = search_plist(dirname, {"dataVolumeUUID":volumeid})
            if not device_infos:
                raise Exception("Missing keyfile")
        try:
            self.emfkey = None
            if device_infos.has_key("EMF"):
                self.emfkey = device_infos["EMF"].decode("hex")
            # LBA offset of the data partition inside the whole NAND image
            # (IVs are derived from absolute LBAs).
            self.lbaoffset = device_infos["dataVolumeOffset"]
            self.keybag = Keybag.createWithPlist(device_infos)
        except:
            raise #Exception("Invalid keyfile")
        rootxattr = self.getXattr(kHFSRootParentID, "com.apple.system.cprotect")
        self.decrypted = (self.header.finderInfo[3] == FLAG_DECRYPTED)
        self.cp_major_version = None
        self.cp_root = None
        if rootxattr == None:
            print "(No root com.apple.system.cprotect xattr)"
        else:
            self.cp_root = cp_root_xattr.parse(rootxattr)
            ver = self.cp_root.major_version
            print "cprotect version : %d (iOS %d)" % (ver, 4 + int(ver != 2))
            assert self.cp_root.major_version == 2 or self.cp_root.major_version == 4
            self.cp_major_version = self.cp_root.major_version
        # May prompt for the passcode / run a bruteforce to unlock the keybag.
        self.keybag = loadKeybagFromVolume(self, device_infos)

    def ivForLBA(self, lba, add=True):
        """Derive the 16-byte AES IV for a given LBA (LFSR-style whitening).
        add=False lets callers feed a logical file offset instead of an LBA."""
        iv = ""
        if add:
            lba = lba + self.lbaoffset
        lba &= 0xffffffff
        for _ in xrange(4):
            if (lba & 1):
                lba = 0x80000061 ^ (lba >> 1);
            else:
                lba = lba >> 1;
            iv += struct.pack("<L", lba)
        return iv

    def getFileKeyForCprotect(self, cp):
        """Unwrap the per-file key from a raw cprotect xattr blob."""
        if self.cp_major_version == None:
            # No root xattr was present: infer the version from this blob.
            self.cp_major_version = struct.unpack("<H", cp[:2])[0]
        cprotect = cprotect_xattr.parse(cp)
        return self.keybag.unwrapKeyForClass(cprotect.persistent_class, cprotect.persistent_key)

    def getFileKeyForFileId(self, fileid):
        """Per-file key for a CNID, or None when the file has no cprotect."""
        cprotect = self.getXattr(fileid, "com.apple.system.cprotect")
        if cprotect == None:
            return None
        return self.getFileKeyForCprotect(cprotect)

    def readFile(self, path, outFolder="./", returnString=False):
        """Read (and decrypt, if needed) a file by path.
        Writes to outFolder unless returnString is True."""
        k,v = self.catalogTree.getRecordFromPath(path)
        if not v:
            print "File %s not found" % path
            return
        assert v.recordType == kHFSPlusFileRecord
        cprotect = self.getXattr(v.data.fileID, "com.apple.system.cprotect")
        if cprotect == None or not self.cp_root or self.decrypted:
            #print "cprotect attr not found, reading normally"
            return super(EMFVolume, self).readFile(path, returnString=returnString)
        filekey = self.getFileKeyForCprotect(cprotect)
        if not filekey:
            print "Cannot unwrap file key for file %s protection_class=%d" % (path, cprotect_xattr.parse(cprotect).persistent_class)
            return
        f = EMFFile(self, v.data.dataFork, v.data.fileID, filekey)
        if returnString:
            return f.readAllBuffer()
        f.readAll(outFolder + os.path.basename(path))
        return True

    def flagVolume(self, flag):
        # Persist decryption progress in finderInfo[3] of the volume header.
        self.header.finderInfo[3] = flag
        h = HFSPlusVolumeHeader.build(self.header)
        return self.bdev.write(0x400, h)

    def decryptAllFiles(self):
        """Decrypt every protected file in place. Refuses to run on images
        marked as half-decrypted or already decrypted."""
        if self.header.finderInfo[3] == FLAG_DECRYPTING:
            print "Volume is half-decrypted, aborting (finderInfo[3] == FLAG_DECRYPTING)"
            return
        elif self.header.finderInfo[3] == FLAG_DECRYPTED:
            print "Volume already decrypted (finderInfo[3] == FLAG_DECRYPTED)"
            return
        self.failedToGetKey = []
        self.notEncrypted = []
        self.decryptedCount = 0
        self.flagVolume(FLAG_DECRYPTING)
        self.catalogTree.traverseLeafNodes(callback=self.decryptFile)
        self.flagVolume(FLAG_DECRYPTED)
        print "Decrypted %d files" % self.decryptedCount
        print "Failed to unwrap keys for : ", self.failedToGetKey
        print "Not encrypted files : %d" % len(self.notEncrypted)

    def decryptFile(self, k,v):
        """Leaf callback for decryptAllFiles: decrypt one catalog record."""
        if v.recordType == kHFSPlusFileRecord:
            filename = getString(k).encode("utf-8")
            cprotect = self.getXattr(v.data.fileID, "com.apple.system.cprotect")
            if not cprotect:
                self.notEncrypted.append(filename)
                return
            fk = self.getFileKeyForCprotect(cprotect)
            if not fk:
                self.failedToGetKey.append(filename)
                return
            print "Decrypting", filename
            f = EMFFile(self, v.data.dataFork, v.data.fileID, fk)
            f.decryptFile()
            self.decryptedCount += 1

    def list_protected_files(self):
        """Print all protected files grouped by protection class."""
        self.protected_dict = {}
        self.xattrTree.traverseLeafNodes(callback=self.inspectXattr)
        for k in self.protected_dict.keys():
            print k
            for v in self.protected_dict[k]: print "\t",v
            print ""

    def inspectXattr(self, k, v):
        # Collect paths of files whose class is stronger than "none".
        if getString(k) == "com.apple.system.cprotect" and k.fileID != kHFSRootParentID:
            c = cprotect_xattr.parse(v.data)
            if c.persistent_class != NSProtectionNone:
                #desc = "%d %s" % (k.fileID, self.getFullPath(k.fileID))
                desc = "%s" % self.getFullPath(k.fileID)
                self.protected_dict.setdefault(PROTECTION_CLASSES.get(c.persistent_class),[]).append(desc)
                #print k.fileID, self.getFullPath(k.fileID), PROTECTION_CLASSES.get(c.persistent_class)

View File

@@ -0,0 +1,315 @@
from btree import AttributesTree, CatalogTree, ExtentsOverflowTree
from structs import *
from util import write_file
from util.bdev import FileBlockDevice
import datetime
import hashlib
import os
import struct
import sys
import zlib
def hfs_date(t):
    """Convert an HFS+ timestamp (seconds since 1904-01-01) to a datetime."""
    mac_epoch = datetime.datetime(1904, 1, 1)
    return mac_epoch + datetime.timedelta(seconds=t)
class HFSFile(object):
    """One fork of an HFS+ file, addressed block-by-block via its extents."""

    def __init__(self, volume, hfsplusfork, fileID, deleted=False):
        self.volume = volume
        self.blockSize = volume.blockSize
        self.fileID = fileID
        self.totalBlocks = hfsplusfork.totalBlocks
        self.logicalSize = hfsplusfork.logicalSize
        # Resolve all extents up front (including overflow extents).
        self.extents = volume.getAllExtents(hfsplusfork, fileID)
        # deleted=True disables the allocation-bitmap sanity check (carving).
        self.deleted = deleted

    def readAll(self, outputfile, truncate=True):
        """Write the whole fork to outputfile, truncated to logicalSize."""
        f = open(outputfile, "wb")
        for i in xrange(self.totalBlocks):
            f.write(self.readBlock(i))
        if truncate:
            f.truncate(self.logicalSize)
        f.close()

    def readAllBuffer(self, truncate=True):
        """Return the whole fork contents as a string."""
        r = ""
        for i in xrange(self.totalBlocks):
            r += self.readBlock(i)
        if truncate:
            r = r[:self.logicalSize]
        return r

    def processBlock(self, block, lba):
        # Hook for subclasses (e.g. decryption); identity by default.
        return block

    def readBlock(self, n):
        """Read logical block n of the fork (empty string past the extents)."""
        bs = self.volume.blockSize
        if n*bs > self.logicalSize:
            # Marker block returned instead of raising, callers may scan past EOF.
            return "BLOCK OUT OF BOUNDS" + "\xFF" * (bs - len("BLOCK OUT OF BOUNDS"))
        bc = 0
        # Walk the extents until the one containing logical block n.
        for extent in self.extents:
            bc += extent.blockCount
            if n < bc:
                lba = extent.startBlock+(n-(bc-extent.blockCount))
                if not self.deleted and self.fileID != kHFSAllocationFileID and not self.volume.isBlockInUse(lba):
                    print "FAIL, block %x not marked as used" % n
                return self.processBlock(self.volume.readBlock(lba), lba)
        return ""

    def getLBAforBlock(self, n):
        """Map a logical fork block to its volume LBA (None past the end)."""
        bc = 0
        for extent in self.extents:
            bc += extent.blockCount
            if n < bc:
                return extent.startBlock+(n-(bc-extent.blockCount))

    def writeBlock(self, n, data):
        """Write data over logical block n in place on the volume."""
        bs = self.volume.blockSize
        if n*bs > self.logicalSize:
            raise Exception("writeBlock, out of bounds %d" % n)
        bc = 0
        for extent in self.extents:
            bc += extent.blockCount
            if n < bc:
                lba = extent.startBlock+(n-(bc-extent.blockCount))
                self.volume.writeBlock(lba, data)
                return
class HFSCompressedResourceFork(HFSFile):
    """Resource fork holding decmpfs (zlib) compressed file content."""

    def __init__(self, volume, hfsplusfork, fileID):
        super(HFSCompressedResourceFork,self).__init__(volume, hfsplusfork, fileID)
        block0 = self.readBlock(0)
        self.header = HFSPlusCmpfRsrcHead.parse(block0)
        print self.header
        self.blocks = HFSPlusCmpfRsrcBlockHead.parse(block0[self.header.headerSize:])
        print "HFSCompressedResourceFork numBlocks:", self.blocks.numBlocks

    #HAX, readblock not implemented
    def readAllBuffer(self):
        """Return the decompressed file content (inflates every chunk)."""
        buff = super(HFSCompressedResourceFork, self).readAllBuffer()
        # Chunk offsets are relative to the end of the block-count field.
        base = self.header.headerSize + 4
        r = ""
        for b in self.blocks.HFSPlusCmpfRsrcBlock:
            r += zlib.decompress(buff[base+b.offset:base+b.offset+b.size])
        return r
class HFSVolume(object):
    """Parser for an HFS+/HFSX filesystem accessed through a block device.

    Opens the special metadata files (allocation bitmap, extents overflow,
    catalog, attributes) and exposes helpers to list folders, read files
    (including decmpfs-compressed ones) and inspect the allocation state.
    """
    def __init__(self, bdev):
        # bdev: object exposing readBlock / writeBlock / setBlockSize.
        self.bdev = bdev
        try:
            data = self.bdev.readBlock(0)
            # The volume header lives at byte 1024; 0x482B = "H+",
            # 0x4858 = "HX" (case-sensitive HFSX).
            self.header = HFSPlusVolumeHeader.parse(data[0x400:0x800])
            assert self.header.signature == 0x4858 or self.header.signature == 0x482B
        except:
            raise
            #raise Exception("Not an HFS+ image")
        self.blockSize = self.header.blockSize
        self.bdev.setBlockSize(self.blockSize)
        #if os.path.getsize(filename) < self.header.totalBlocks * self.blockSize:
        #    print "WARNING: HFS image appears to be truncated"
        # Special metadata files use reserved CNIDs (see kHFS*FileID).
        self.allocationFile = HFSFile(self, self.header.allocationFile, kHFSAllocationFileID)
        self.allocationBitmap = self.allocationFile.readAllBuffer()
        self.extentsFile = HFSFile(self, self.header.extentsFile, kHFSExtentsFileID)
        self.extentsTree = ExtentsOverflowTree(self.extentsFile)
        self.catalogFile = HFSFile(self, self.header.catalogFile, kHFSCatalogFileID)
        self.xattrFile = HFSFile(self, self.header.attributesFile, kHFSAttributesFileID)
        self.catalogTree = CatalogTree(self.catalogFile, self)
        self.xattrTree = AttributesTree(self.xattrFile)
        self.hasJournal = self.header.attributes & (1 << kHFSVolumeJournaledBit)
    def readBlock(self, b):
        """Read one allocation block from the underlying device."""
        return self.bdev.readBlock(b)
    def writeBlock(self, lba, data):
        """Write one allocation block to the underlying device."""
        return self.bdev.writeBlock(lba, data)
    def volumeID(self):
        """Return the 8-byte volume UUID stored in finderInfo[6:8]."""
        return struct.pack(">LL", self.header.finderInfo[6], self.header.finderInfo[7])
    def isBlockInUse(self, block):
        """True when *block* is marked allocated in the allocation bitmap
        (big-endian bit order within each byte)."""
        thisByte = ord(self.allocationBitmap[block / 8])
        return (thisByte & (1 << (7 - (block % 8)))) != 0
    def unallocatedBlocks(self):
        """Yield (block number, block data) for every free block.

        FIXME(review): calls self.read(offset, size), which is not defined
        on this class — presumably provided by a subclass; confirm.
        """
        for i in xrange(self.header.totalBlocks):
            if not self.isBlockInUse(i):
                yield i, self.read(i*self.blockSize, self.blockSize)
    def getExtentsOverflowForFile(self, fileID, startBlock, forkType=kForkTypeData):
        """Look up overflow extents (beyond the first 8) for a fork."""
        return self.extentsTree.searchExtents(fileID, forkType, startBlock)
    def getXattr(self, fileID, name):
        """Return the raw value of extended attribute *name* for *fileID*."""
        return self.xattrTree.searchXattr(fileID, name)
    def getFileByPath(self, path):
        """Return the (key, record) catalog pair for *path*."""
        return self.catalogTree.getRecordFromPath(path)
    def getFileIDByPath(self, path):
        """Return the CNID (folder or file ID) for *path*, or None."""
        key, record = self.catalogTree.getRecordFromPath(path)
        if not record:
            return
        if record.recordType == kHFSPlusFolderRecord:
            return record.data.folderID
        return record.data.fileID
    def listFolderContents(self, path):
        """Print CNID and name of each entry directly under folder *path*."""
        k,v = self.catalogTree.getRecordFromPath(path)
        if not k or v.recordType != kHFSPlusFolderRecord:
            return
        for k,v in self.catalogTree.getFolderContents(v.data.folderID):
            if v.recordType == kHFSPlusFolderRecord:
                #.HFS+ Private Directory Data\r
                print v.data.folderID, getString(k).replace("\r","") + "/"
            elif v.recordType == kHFSPlusFileRecord:
                print v.data.fileID, getString(k)
    def ls(self, path):
        """Return {name: record data} for folder *path* (None if not a folder)."""
        k,v = self.catalogTree.getRecordFromPath(path)
        return self._ls(k, v)
    def _ls(self, k, v):
        # Shared implementation for ls(); folder names get a "/" suffix.
        res = {}
        if not k or v.recordType != kHFSPlusFolderRecord:
            return None
        for k,v in self.catalogTree.getFolderContents(v.data.folderID):
            if v.recordType == kHFSPlusFolderRecord:
                #.HFS+ Private Directory Data\r
                res[getString(k).replace("\r","") + "/"] = v.data
            elif v.recordType == kHFSPlusFileRecord:
                res[getString(k)] = v.data
        return res
    def listXattrs(self, path):
        """Return all extended attributes for the file or folder at *path*.

        NOTE(review): the folder branch matches kHFSPlusFolderThreadRecord
        rather than kHFSPlusFolderRecord — verify this is intended.
        """
        k,v = self.catalogTree.getRecordFromPath(path)
        if k and v.recordType == kHFSPlusFileRecord:
            return self.xattrTree.getAllXattrs(v.data.fileID)
        elif k and v.recordType == kHFSPlusFolderThreadRecord:
            return self.xattrTree.getAllXattrs(v.data.folderID)
    def readFileByRecord(self, record):
        """Return a file's content given its catalog record, transparently
        handling the decmpfs compression types (1=inline raw, 3=inline zlib,
        4=zlib chunks in the resource fork)."""
        assert record.recordType == kHFSPlusFileRecord
        xattr = self.getXattr(record.data.fileID, "com.apple.decmpfs")
        data = None
        if xattr:
            decmpfs = HFSPlusDecmpfs.parse(xattr)
            if decmpfs.compression_type == 1:
                return xattr[16:]
            elif decmpfs.compression_type == 3:
                # Type 3 payloads may be stored uncompressed when small.
                if decmpfs.uncompressed_size == len(xattr) - 16:
                    return xattr[16:]
                return zlib.decompress(xattr[16:])
            elif decmpfs.compression_type == 4:
                f = HFSCompressedResourceFork(self, record.data.resourceFork, record.data.fileID)
                data = f.readAllBuffer()
                return data
        f = HFSFile(self, record.data.dataFork, record.data.fileID)
        return f.readAllBuffer()
    #TODO: returnString compress
    def readFile(self, path, outFolder="./", returnString=False):
        """Read the file at *path*.

        Either writes it into *outFolder* or returns its content when
        returnString is True.  Return behavior is inconsistent across the
        decmpfs branches (types 1/3 return or write, type 4 does both) —
        kept as-is.
        """
        k,v = self.catalogTree.getRecordFromPath(path)
        if not v:
            print "File %s not found" % path
            return
        assert v.recordType == kHFSPlusFileRecord
        xattr = self.getXattr(v.data.fileID, "com.apple.decmpfs")
        if xattr:
            decmpfs = HFSPlusDecmpfs.parse(xattr)
            if decmpfs.compression_type == 1:
                return xattr[16:]
            elif decmpfs.compression_type == 3:
                if decmpfs.uncompressed_size == len(xattr) - 16:
                    z = xattr[16:]
                else:
                    z = zlib.decompress(xattr[16:])
                open(outFolder + os.path.basename(path), "wb").write(z)
                return
            elif decmpfs.compression_type == 4:
                f = HFSCompressedResourceFork(self, v.data.resourceFork, v.data.fileID)
                z = f.readAllBuffer()
                open(outFolder + os.path.basename(path), "wb").write(z)
                return z
        f = HFSFile(self, v.data.dataFork, v.data.fileID)
        if returnString:
            return f.readAllBuffer()
        else:
            f.readAll(outFolder + os.path.basename(path))
    def readJournal(self):
        """Return the raw journal contents (stored as the /.journal file)."""
        #jb = self.read(self.header.journalInfoBlock * self.blockSize, self.blockSize)
        #jib = JournalInfoBlock.parse(jb)
        #return self.read(jib.offset,jib.size)
        return self.readFile("/.journal", returnString=True)
    def listAllFileIds(self):
        """Return a dict whose keys are every file CNID in the catalog."""
        self.fileids={}
        self.catalogTree.traverseLeafNodes(callback=self.grabFileId)
        return self.fileids
    def grabFileId(self, k,v):
        # Leaf-traversal callback for listAllFileIds().
        if v.recordType == kHFSPlusFileRecord:
            self.fileids[v.data.fileID] = True
    def getFileRecordForFileID(self, fileID):
        """Return the catalog record for *fileID* (None if absent)."""
        k,v = self.catalogTree.searchByCNID(fileID)
        return v
    def getFullPath(self, fileID):
        """Reconstruct the absolute path of *fileID* by walking thread
        records up to the root folder."""
        k,v = self.catalogTree.search((fileID, ""))
        if not k:
            print "File ID %d not found" % fileID
            return ""
        p = getString(v.data)
        while k:
            k,v = self.catalogTree.search((v.data.parentID, ""))
            if k.parentID == kHFSRootFolderID:
                break
            p = getString(v.data) + "/" + p
        return "/" + p
    def getFileRecordForPath(self, path):
        """Return the record data for *path*, or None when not found."""
        k,v = self.catalogTree.getRecordFromPath(path)
        if not k:
            return
        return v.data
    def getAllExtents(self, hfsplusfork, fileID):
        """Return the full extent list of a fork: the 8 inline extents plus
        any continuation records from the extents-overflow tree."""
        b = 0
        extents = []
        for extent in hfsplusfork.HFSPlusExtentDescriptor:
            extents.append(extent)
            b += extent.blockCount
        # Keep pulling overflow records until all blocks are accounted for.
        while b != hfsplusfork.totalBlocks:
            k,v = self.getExtentsOverflowForFile(fileID, b)
            if not v:
                print "extents overflow missing, startblock=%d" % b
                break
            for extent in v:
                extents.append(extent)
                b += extent.blockCount
        return extents
    def dohashFiles(self, k,v):
        # Leaf-traversal callback for hashFiles(): print name + SHA1 of content.
        if v.recordType == kHFSPlusFileRecord:
            filename = getString(k)
            f = HFSFile(self, v.data.dataFork, v.data.fileID)
            print filename, hashlib.sha1(f.readAllBuffer()).hexdigest()
    def hashFiles(self):
        """Print the SHA1 of every file in the catalog."""
        self.catalogTree.traverseLeafNodes(callback=self.dohashFiles)
if __name__ == "__main__":
    # Ad-hoc manual test of the HFS+ parser.
    # FIXME(review): HFSVolume.__init__ takes a block-device object and no
    # 'offset' keyword; this call passes a filename and offset, so it cannot
    # work against the class as defined above — confirm intended API.
    v = HFSVolume("myramdisk.dmg",offset=0x40)
    v.listFolderContents("/")
    print v.readFile("/usr/local/share/restore/imeisv_svn.plist")
    print v.listXattrs("/usr/local/share/restore/imeisv_svn.plist")

View File

@@ -0,0 +1,152 @@
from crypto.aes import AESencryptCBC, AESdecryptCBC
from emf import cprotect_xattr, EMFFile
from structs import *
from util import write_file, sizeof_fmt
import hashlib
"""
Implementation of the following paper :
Using the HFS+ Journal For Deleted File Recovery. Aaron Burghardt, Adam Feldman. DFRWS 2008
http://www.dfrws.org/2008/proceedings/p76-burghardt.pdf
http://www.dfrws.org/2008/proceedings/p76-burghardt_pres.pdf
"""
def carveBtreeNode(node, kClass, dClass):
    """Best-effort parse of a raw buffer as a B-tree leaf node.

    node: candidate node bytes (typically a slice of the journal).
    kClass/dClass: construct parsers for the record key and data.
    Returns a list of (key, data) tuples, or [] when the buffer is not a
    parsable leaf node.
    """
    try:
        btnode = BTNodeDescriptor.parse(node)
        if btnode.kind != kBTLeafNode:
            return []
        recs = []
        # Record offsets are big-endian uint16s packed at the end of the
        # node, in reverse record order.
        offsets = Array(btnode.numRecords, UBInt16("off")).parse(node[-2*btnode.numRecords:])
        for i in xrange(btnode.numRecords):
            off = offsets[btnode.numRecords-i-1]
            k = kClass.parse(node[off:])
            # Data follows the key: 2-byte keyLength field + key bytes.
            off += 2 + k.keyLength
            d = dClass.parse(node[off:])
            recs.append((k,d))
        return recs
    except Exception:
        # Carving feeds arbitrary journal sectors through the parsers, so
        # parse failures are expected — treat them as "no records".  Was a
        # bare except:, which also swallowed KeyboardInterrupt/SystemExit.
        return []
"""
for standard HFS volumes
"""
def carveHFSVolumeJournal(volume):
    """Scan a plain HFS+ volume's journal for catalog records of deleted
    files (Burghardt & Feldman, DFRWS 2008).

    Returns a list of (name, catalog record) tuples for file records whose
    CNID is gone from the live catalog and whose first data block is free.
    """
    journal = volume.readJournal()
    hdr = journal_header.parse(journal)
    sector_size = hdr.jhdr_size
    nodeSize = volume.catalogTree.nodeSize
    f={}
    # Try every journal sector boundary as a potential catalog leaf node.
    for i in xrange(0,len(journal), sector_size):
        for k,v in carveBtreeNode(journal[i:i+nodeSize],HFSPlusCatalogKey, HFSPlusCatalogData):
            if v.recordType == kHFSPlusFileRecord:
                name = getString(k)
                # Hash the raw key to dedupe multiple journal copies.
                h = hashlib.sha1(HFSPlusCatalogKey.build(k)).digest()
                if f.has_key(h):
                    continue
                # Deleted = CNID absent from the live catalog...
                if volume.catalogTree.searchByCNID(v.data.fileID) == (None, None):
                    # ...and its first data block is not currently allocated.
                    if volume.isBlockInUse(v.data.dataFork.HFSPlusExtentDescriptor[0].startBlock) == False:
                        print "deleted file", v.data.fileID, name
                        fileid = v.data.fileID  # NOTE(review): unused leftover
                        f[h]=(name, v)
    return f.values()
# File-type magic prefixes recognised as "successfully decrypted" content:
# SQLite, binary plist, XML, JPEG, Mach-O, PNG, M4A, QuickTime.
magics=["SQLite", "bplist", "<?xml", "\xFF\xD8\xFF", "\xCE\xFA\xED\xFE", "\x89PNG", "\x00\x00\x00\x1CftypM4A",
        "\x00\x00\x00\x14ftypqt"]

def isDecryptedCorrectly(data):
    """Heuristic plaintext check: does *data* start with a known file magic?

    HAX: should do something better like compute entropy or something.
    """
    # startswith accepts a tuple of prefixes: one call instead of a
    # Python-level loop over the magic list.
    return data.startswith(tuple(magics))
"""
carve the journal for deleted cprotect xattrs and file records
"""
def carveEMFVolumeJournal(volume):
    """Carve an EMF (encrypted) volume's journal for deleted file records and
    their com.apple.system.cprotect keys.

    Returns (deleted_files, keys) where deleted_files is a list of
    (name, catalog record) tuples and keys maps fileID -> list of unwrapped
    file keys recovered from carved cprotect xattrs.
    """
    journal = volume.readJournal()
    print "Journal size : %s" % sizeof_fmt(len(journal))
    hdr = journal_header.parse(journal)
    sector_size = hdr.jhdr_size
    nodeSize = volume.catalogTree.nodeSize
    print "Collecting existing file ids"
    fileIds = volume.listAllFileIds()
    print "%d file IDs" % len(fileIds.keys())
    files = {}
    keys = {}
    for i in xrange(0,len(journal),sector_size):
        # Pass 1 over this sector: catalog leaf records of deleted files.
        for k,v in carveBtreeNode(journal[i:i+nodeSize],HFSPlusCatalogKey, HFSPlusCatalogData):
            if v.recordType == kHFSPlusFileRecord:
                name = getString(k)
                # Hash the raw key to dedupe multiple journal copies.
                h = hashlib.sha1(HFSPlusCatalogKey.build(k)).digest()
                if files.has_key(h):
                    continue
                if not fileIds.has_key(v.data.fileID):
                    #we only keep files where the first block is not marked as in use
                    if volume.isBlockInUse(v.data.dataFork.HFSPlusExtentDescriptor[0].startBlock) == False:
                        print "Found deleted file record", v.data.fileID, name
                        files[h] = (name,v)
        # Pass 2: attribute leaf records holding cprotect xattrs of files
        # that no longer exist in the catalog.
        for k,v in carveBtreeNode(journal[i:i+nodeSize],HFSPlusAttrKey, HFSPlusAttrData):
            if getString(k) == "com.apple.system.cprotect":
                if not fileIds.has_key(k.fileID):
                    filekeys = keys.setdefault(k.fileID, [])
                    try:
                        cprotect = cprotect_xattr.parse(v.data)
                    except:
                        continue
                    #assert cprotect.xattr_major_version == 2
                    # Unwrap the per-file key with the device keybag.
                    filekey = volume.keybag.unwrapKeyForClass(cprotect.persistent_class, cprotect.persistent_key)
                    if filekey and not filekey in filekeys:
                        print "Found key for file", k.fileID
                        filekeys.append(filekey)
    return files.values(), keys
"""
"bruteforce" method, tries to decrypt all unallocated blocks with provided file keys
this is a hack, don't expect interesting results with this
"""
def carveEMFemptySpace(volume, file_keys, outdir):
    """Try every recovered file key against every unallocated block.

    EMF decryption is two-layered: re-encrypt with the volume EMF key to
    undo the whole-disk layer, then decrypt with the candidate file key.
    Blocks whose plaintext starts with a known magic are saved to *outdir*.
    """
    for lba, block in volume.unallocatedBlocks():
        iv = volume.ivForLBA(lba)
        for filekey in file_keys:
            ciphertext = AESencryptCBC(block, volume.emfkey, iv)
            clear = AESdecryptCBC(ciphertext, filekey, iv)
            if isDecryptedCorrectly(clear):
                print "Decrypted stuff at lba %x" % lba
                open(outdir+ "/%x.bin" % lba, "wb").write(clear)
def do_emf_carving(volume, carveokdir, carvenokdir):
    """Top-level EMF recovery: carve the journal, then try to decrypt each
    deleted file with its recovered key(s).

    Files whose plaintext looks valid go to *carveokdir*, the rest to
    *carvenokdir*.
    """
    deletedFiles, filekeys = carveEMFVolumeJournal(volume)
    print "Journal carving done, trying to extract deleted files"
    n = 0
    for name, vv in deletedFiles:
        # A file may have several candidate keys (several journal copies).
        for filekey in filekeys.get(vv.data.fileID, []):
            ff = EMFFile(volume,vv.data.dataFork, vv.data.fileID, filekey, deleted=True)
            data = ff.readAllBuffer()
            if isDecryptedCorrectly(data):
                write_file(carveokdir + "%d_%s" % (vv.data.fileID, name.replace("/","_")),data)
                n += 1
            else:
                write_file(carvenokdir + "%d_%s" % (vv.data.fileID, name.replace("/","_")),data)
        if not filekeys.has_key(vv.data.fileID):
            print "Missing file key for", name
        else:
            # Consume the keys so only unmatched ones remain below.
            del filekeys[vv.data.fileID]
    print "Done, extracted %d files" % n
    # Disabled slow empty-space bruteforce.
    # FIXME(review): if ever re-enabled, carveEMFemptySpace is called here
    # without its required 'outdir' argument.
    if False:
        fks = set(reduce(lambda x,y: x+y, filekeys.values()))
        print "%d file keys left, try carving empty space (slow) ? CTRL-C to exit" % len(fks)
        raw_input()
        carveEMFemptySpace(volume, fks)

View File

@@ -0,0 +1,341 @@
from construct import *
from construct.macros import UBInt64
"""
http://developer.apple.com/library/mac/#technotes/tn/tn1150.html
"""
def getString(obj):
    """Return the unicode name stored in a record's HFSUniStr255 field."""
    name_field = obj.HFSUniStr255
    return name_field.unicode
# Unix symlink bit of st_mode (Python 2 octal literal).
S_IFLNK = 0120000
# Finder fileType/fileCreator four-char codes: 'slnk'/'rhap' mark symlinks,
# 'hlnk'/'hfs+' mark hard links.
kSymLinkFileType = 0x736C6E6B
kSymLinkCreator = 0x72686170
kHardLinkFileType = 0x686C6E6B
kHFSPlusCreator = 0x6866732B
# keyCompareType values of the catalog B-tree header (HFS+ vs HFSX).
kHFSCaseFolding = 0xCF
kHFSBinaryCompare = 0xBC
def is_symlink(rec):
    """True when a catalog file record's Finder info identifies a symlink."""
    info = rec.FileInfo
    return info.fileCreator == kSymLinkCreator and info.fileType == kSymLinkFileType
# Reserved catalog node IDs (TN1150 "Special Files").
kHFSRootParentID = 1
kHFSRootFolderID = 2
kHFSExtentsFileID = 3
kHFSCatalogFileID = 4
kHFSBadBlockFileID = 5
kHFSAllocationFileID = 6
kHFSStartupFileID = 7
kHFSAttributesFileID = 8
kHFSRepairCatalogFileID = 14
kHFSBogusExtentFileID = 15
kHFSFirstUserCatalogNodeID = 16
# B-tree node kinds (BTNodeDescriptor.kind).
kBTLeafNode = -1
kBTIndexNode = 0
kBTHeaderNode = 1
kBTMapNode = 2
# Catalog record types.
kHFSPlusFolderRecord = 0x0001
kHFSPlusFileRecord = 0x0002
kHFSPlusFolderThreadRecord = 0x0003
kHFSPlusFileThreadRecord = 0x0004
# Attribute record forms.
kHFSPlusAttrInlineData = 0x10
kHFSPlusAttrForkData = 0x20
kHFSPlusAttrExtents = 0x30
# Fork selectors used in extent keys.
kForkTypeData = 0
kForkTypeRsrc = 0xFF
# Bit positions within the volume header's attributes field.
kHFSVolumeHardwareLockBit = 7
kHFSVolumeUnmountedBit = 8
kHFSVolumeSparedBlocksBit = 9
kHFSVolumeNoCacheRequiredBit = 10
kHFSBootVolumeInconsistentBit = 11
kHFSCatalogNodeIDsReusedBit = 12
kHFSVolumeJournaledBit = 13
kHFSVolumeSoftwareLockBit = 15
DECMPFS_MAGIC = 0x636d7066 #cmpf
# One contiguous run of allocation blocks belonging to a fork.
HFSPlusExtentDescriptor = Struct("HFSPlusExtentDescriptor",
    UBInt32("startBlock"),
    UBInt32("blockCount")
)
# The 8 inline extents stored directly in a fork descriptor.
HFSPlusExtentRecord = Array(8,HFSPlusExtentDescriptor)
# Fork descriptor: size information plus the first 8 extents.
HFSPlusForkData = Struct("HFSPlusForkData",
    UBInt64("logicalSize"),
    UBInt32("clumpSize"),
    UBInt32("totalBlocks"),
    Array(8, HFSPlusExtentDescriptor)
)
# HFS+ volume header, stored at byte offset 1024 of the volume (TN1150).
HFSPlusVolumeHeader= Struct("HFSPlusVolumeHeader",
    UBInt16("signature"),
    UBInt16("version"),
    UBInt32("attributes"),
    UBInt32("lastMountedVersion"),
    UBInt32("journalInfoBlock"),
    UBInt32("createDate"),
    UBInt32("modifyDate"),
    UBInt32("backupDate"),
    UBInt32("checkedDate"),
    UBInt32("fileCount"),
    UBInt32("folderCount"),
    UBInt32("blockSize"),
    UBInt32("totalBlocks"),
    UBInt32("freeBlocks"),
    UBInt32("nextAllocation"),
    UBInt32("rsrcClumpSize"),
    UBInt32("dataClumpSize"),
    UBInt32("nextCatalogID"),
    UBInt32("writeCount"),
    UBInt64("encodingsBitmap"),
    Array(8, UBInt32("finderInfo")),
    # Fork descriptors of the five special metadata files.
    Struct("allocationFile", Embed(HFSPlusForkData)),
    Struct("extentsFile", Embed(HFSPlusForkData)),
    Struct("catalogFile", Embed(HFSPlusForkData)),
    Struct("attributesFile", Embed(HFSPlusForkData)),
    Struct("startupFile", Embed(HFSPlusForkData)),
)
# Header present at the start of every B-tree node; fLink/bLink chain
# nodes of the same kind together.
BTNodeDescriptor = Struct("BTNodeDescriptor",
    UBInt32("fLink"),
    UBInt32("bLink"),
    SBInt8("kind"),
    UBInt8("height"),
    UBInt16("numRecords"),
    UBInt16("reserved")
)
# B-tree header record (first record of node 0).
BTHeaderRec = Struct("BTHeaderRec",
    UBInt16("treeDepth"),
    UBInt32("rootNode"),
    UBInt32("leafRecords"),
    UBInt32("firstLeafNode"),
    UBInt32("lastLeafNode"),
    UBInt16("nodeSize"),
    UBInt16("maxKeyLength"),
    UBInt32("totalNodes"),
    UBInt32("freeNodes"),
    UBInt16("reserved1"),
    UBInt32("clumpSize"),
    UBInt8("btreeType"),
    UBInt8("keyCompareType"),
    UBInt32("attributes"),
    Array(16, UBInt32("reserved3"))
)
# Length-prefixed UTF-16-BE string used for file and attribute names.
HFSUniStr255 = Struct("HFSUniStr255",
    UBInt16("length"),
    String("unicode", lambda ctx: ctx["length"] * 2, encoding="utf-16-be")
)
# Key of the attributes B-tree: (fileID, startBlock, attribute name).
HFSPlusAttrKey = Struct("HFSPlusAttrKey",
    UBInt16("keyLength"),
    UBInt16("pad"),
    UBInt32("fileID"),
    UBInt32("startBlock"),
    HFSUniStr255,
    #UBInt32("nodeNumber")
)
# Inline attribute data record.
HFSPlusAttrData = Struct("HFSPlusAttrData",
    UBInt32("recordType"),
    Array(2, UBInt32("reserved")),
    UBInt32("size"),
    MetaField("data", lambda ctx: ctx["size"])
)
# Key of the catalog B-tree: parent folder CNID + node name.
HFSPlusCatalogKey = Struct("HFSPlusCatalogKey",
    UBInt16("keyLength"),
    UBInt32("parentID"),
    HFSUniStr255
)
# Unix-style ownership and permissions block of a catalog record.
HFSPlusBSDInfo = Struct("HFSPlusBSDInfo",
    UBInt32("ownerID"),
    UBInt32("groupID"),
    UBInt8("adminFlags"),
    UBInt8("ownerFlags"),
    UBInt16("fileMode"),
    UBInt32("union_special")
)
# Classic Mac geometry types embedded in the Finder info structures.
Point = Struct("Point",
    SBInt16("v"),
    SBInt16("h")
)
Rect = Struct("Rect",
    SBInt16("top"),
    SBInt16("left"),
    SBInt16("bottom"),
    SBInt16("right")
)
# Finder metadata for files (type/creator codes, flags, icon location).
FileInfo = Struct("FileInfo",
    UBInt32("fileType"),
    UBInt32("fileCreator"),
    UBInt16("finderFlags"),
    Point,
    UBInt16("reservedField")
)
ExtendedFileInfo = Struct("ExtendedFileInfo",
    Array(4, SBInt16("reserved1")),
    UBInt16("extendedFinderFlags"),
    SBInt16("reserved2"),
    SBInt32("putAwayFolderID")
)
# Finder metadata for folders (window rect, flags, scroll position).
FolderInfo = Struct("FolderInfo",
    Rect,
    UBInt16("finderFlags"),
    Point,
    UBInt16("reservedField")
)
ExtendedFolderInfo = Struct("ExtendedFolderInfo",
    Point,
    SBInt32("reserved1"),
    UBInt16("extendedFinderFlags"),
    SBInt16("reserved2"),
    SBInt32("putAwayFolderID")
)
# Catalog record payload for a folder.
HFSPlusCatalogFolder = Struct("HFSPlusCatalogFolder",
    UBInt16("flags"),
    UBInt32("valence"),
    UBInt32("folderID"),
    UBInt32("createDate"),
    UBInt32("contentModDate"),
    UBInt32("attributeModDate"),
    UBInt32("accessDate"),
    UBInt32("backupDate"),
    HFSPlusBSDInfo,
    FolderInfo,
    ExtendedFolderInfo,
    UBInt32("textEncoding"),
    UBInt32("reserved")
)
# Catalog record payload for a file, including both fork descriptors.
HFSPlusCatalogFile = Struct("HFSPlusCatalogFile",
    UBInt16("flags"),
    UBInt32("reserved1"),
    UBInt32("fileID"),
    UBInt32("createDate"),
    UBInt32("contentModDate"),
    UBInt32("attributeModDate"),
    UBInt32("accessDate"),
    UBInt32("backupDate"),
    HFSPlusBSDInfo,
    FileInfo,
    ExtendedFileInfo,
    UBInt32("textEncoding"),
    UBInt32("reserved2"),
    Struct("dataFork", Embed(HFSPlusForkData)),
    Struct("resourceFork", Embed(HFSPlusForkData))
)
# Thread record: maps a CNID back to its parent and name.
HFSPlusCatalogThread = Struct("HFSPlusCatalogThread",
    SBInt16("reserved"),
    UBInt32("parentID"),
    HFSUniStr255,
)
# Tagged-union catalog record: recordType selects the payload layout.
HFSPlusCatalogData = Struct("HFSPlusCatalogData",
    UBInt16("recordType"),
    Switch("data", lambda ctx: ctx["recordType"],
        {
        kHFSPlusFolderRecord : HFSPlusCatalogFolder,
        kHFSPlusFileRecord : HFSPlusCatalogFile,
        kHFSPlusFolderThreadRecord: HFSPlusCatalogThread,
        kHFSPlusFileThreadRecord: HFSPlusCatalogThread
        },
        default=HFSPlusCatalogFolder #XXX: should not reach
    )
)
# Key of the extents-overflow B-tree.
HFSPlusExtentKey = Struct("HFSPlusExtentKey",
    UBInt16("keyLength"),
    UBInt8("forkType"),
    UBInt8("pad"),
    UBInt32("fileID"),
    UBInt32("startBlock")
)
# Header of the com.apple.decmpfs xattr (little-endian).
# NOTE(review): the struct name string contains a trailing space; kept
# as-is since construct uses it for container naming.
HFSPlusDecmpfs = Struct("HFSPlusDecmpfs ",
    ULInt32("compression_magic"),
    ULInt32("compression_type"),
    ULInt64("uncompressed_size"),
)
# decmpfs type-4 resource fork layout: header, chunk table, trailer.
HFSPlusCmpfRsrcHead = Struct("HFSPlusCmpfRsrcHead",
    UBInt32("headerSize"),
    UBInt32("totalSize"),
    UBInt32("dataSize"),
    UBInt32("flags")
)
HFSPlusCmpfRsrcBlock = Struct("HFSPlusCmpfRsrcBlock",
    ULInt32("offset"),
    ULInt32("size")
)
HFSPlusCmpfRsrcBlockHead = Struct("HFSPlusCmpfRsrcBlockHead",
    UBInt32("dataSize"),
    ULInt32("numBlocks"),
    Array(lambda ctx:ctx["numBlocks"], HFSPlusCmpfRsrcBlock)
)
HFSPlusCmpfEnd = Struct("HFSPlusCmpfEnd",
    Array(6, UBInt32("pad")),
    UBInt16("unk1"),
    UBInt16("unk2"),
    UBInt16("unk3"),
    UBInt32("magic"),
    UBInt32("flags"),
    UBInt64("size"),
    UBInt32("unk4")
)
"""
Journal stuff
"""
JournalInfoBlock = Struct("JournalInfoBlock",
UBInt32("flags"),
Array(8, UBInt32("device_signature")),
UBInt64("offset"),
UBInt64("size"),
Array(32, UBInt32("reserved"))
)
journal_header = Struct("journal_header",
ULInt32("magic"),
ULInt32("endian"),
ULInt64("start"),
ULInt64("end"),
ULInt64("size"),
ULInt32("blhdr_size"),
ULInt32("checksum"),
ULInt32("jhdr_size")
)
block_info = Struct("block_info",
ULInt64("bnum"),
ULInt32("bsize"),
ULInt32("next")
)
block_list_header = Struct("block_list_header",
ULInt16("max_blocks"),
ULInt16("num_blocks"),
ULInt32("bytes_used"),
SLInt32("checksum"),
UBInt32("pad"),
Array(lambda ctx:ctx["num_blocks"], block_info)
)

View File

@@ -0,0 +1,366 @@
from cmd import Cmd
from firmware.img3 import Img3
from hfs.emf import cprotect_xattr, PROTECTION_CLASSES
from hfs.hfs import hfs_date
from keystore.keybag import Keybag, PROTECTION_CLASSES
from nand.carver import NANDCarver
from nand.nand import NAND
from optparse import OptionParser
from util import hexdump, makedirs, write_file, parsePlist, sizeof_fmt,\
readPlist
from util.bruteforce import bruteforcePasscode
from util.ramdiskclient import RamdiskToolClient
import os
import plistlib
import struct
from pprint import pprint
from keychain import keychain_load
from nand.remote import IOFlashStorageKitClient
# Apple board identifier ("hwModel") -> marketing device name.
DEVICES_NAMES = {"m68ap": "iPhone 2G",
                 "n82ap": "iPhone 3G",
                 "n88ap": "iPhone 3GS",
                 "n90ap": "iPhone 4 GSM",
                 "n92ap": "iPhone 4 CDMA",
                 "n72ap": "iPod Touch 2G",
                 "n18ap": "iPod Touch 3G",
                 "n81ap": "iPod Touch 4G",
                 "k48ap": "iPad 1",
                 }
def print_device_infos(d):
    """Print a human-readable summary of the device_infos dict (model,
    UDID, ECID, serial, and the UID-derived keys when present)."""
    print "Device model:", DEVICES_NAMES.get(d["hwModel"].lower(), d["hwModel"])
    print "UDID:", d["udid"]
    print "ECID:", d.get("ECID")
    print "Serial number:", d["serialNumber"]
    # key835/key89B are only present once recovered from the device.
    for k in ["key835", "key89B"]:
        if d.has_key(k): print "%s: %s" % (k, d[k])
def grab_system_version(system, device_infos):
    """Print the iOS ProductVersion read from the system partition's
    SystemVersion.plist.

    NOTE(review): device_infos is currently unused.
    """
    SystemVersion = system.readFile("/System/Library/CoreServices/SystemVersion.plist", returnString=True)
    if SystemVersion:
        SystemVersion = plistlib.readPlistFromString(SystemVersion)
        print "iOS version: ", SystemVersion.get("ProductVersion")
def get_device_name(dataVolume):
    """Return the user-visible device name (ComputerName) from the data
    partition's SystemConfiguration preferences, or a placeholder when it
    cannot be determined."""
    preferences = dataVolume.readFile("/preferences/SystemConfiguration/preferences.plist", returnString=True)
    if preferences:
        preferences = parsePlist(preferences)
        # Fix: the fallback previously read "[device name found]", which
        # wrongly implied success when the ComputerName key is absent.
        return preferences.get("System", {}).get("System", {}).get("ComputerName", "[device name not found]")
    return "[device name not found]"
def jailbreak_check(system):
    """Heuristic jailbreak detection: a stock iOS system partition does not
    ship /bin/sh, so its presence suggests a jailbreak."""
    #lazy jailbreak check
    binsh = system.readFile("/bin/sh",returnString=True)
    if binsh:
        print "Device is probably jailbroken"
    #fstab = system.readFile("/etc/fstab",returnString=True)
    #XXX follow symlinks
    #if fstab.count("rw") != 1:
    #    print "Device is probably jailbroken"
def check_kernel(system, device_infos):
    """Verify the kernelcache Img3 signature (GID key 0x89A) and detect a
    decrypted (i.e. patched/jailbroken) kernelcache."""
    kernel = system.readFile("/System/Library/Caches/com.apple.kernelcaches/kernelcache",returnString=True)
    if not kernel: return
    k3 = Img3("kernel", kernel)
    if k3.sigcheck(device_infos.get("key89A","").decode("hex")):
        print "Kernel signature check OK"
    # A stock kernelcache is encrypted; seeing the "complzss" marker in
    # clear at offset 0x40 means it was re-flashed decrypted.
    if kernel[0x40:0x50].startswith("complzss"):
        print "Kernel is decrypted, probably jailbroken with redsn0w/pwnage tool"
class ExaminerShell(Cmd):
    """Interactive forensic shell over a NAND image (or a live device via
    the ramdisk tool): browse both HFS partitions, dump files, inspect
    keybags/keychain, and carve deleted data."""
    def __init__(self, image, completekey='tab', stdin=None, stdout=None):
        Cmd.__init__(self, completekey=completekey, stdin=stdin, stdout=stdout)
        self.curdir = "/"
        self.rdisk = None
        # "remote" means we are talking to a device over the ramdisk tool
        # rather than reading a dumped image file.
        if image.filename == "remote":
            self.rdisk = RamdiskToolClient.get()
        self.device_infos = image.device_infos
        # All path-taking commands share the same filename completer.
        self.complete_open = self._complete
        self.complete_xattr = self._complete
        self.complete_cprotect = self._complete
        self.complete_ls = self._complete
        self.complete_cd = self._complete
        self.complete_plist = self._complete
        self.complete_xxd = self._complete
        self.image = image
        # Partition 0 = read-only system, partition 1 = user data.
        self.system = image.getPartitionVolume(0)
        self.data = image.getPartitionVolume(1)
        self.volume = None
        self.volname = ""
        grab_system_version(self.system, self.device_infos)
        print "Keybag state: %slocked" % (int(self.data.keybag.unlocked) * "un")
        self.deviceName = get_device_name(self.data)
        # Start browsing on the data partition.
        self.do_data("")
        self.savepath = os.path.join(os.path.dirname(image.filename), "%s.plist" % self.device_infos.udid[:10])
        #if image.iosVersion > 3 and not image.device_infos.has_key("passcode"):
        #    print "No passcode found in plist file, bruteforce required to access protected data"
        # NAND carver is created lazily by do_undelete.
        self.carver = None
    def set_partition(self, name, vol):
        # Switch the active volume and reset the working directory.
        self.volume = vol
        self.do_cd("/")
        self.volname = name
        self.prompt = "(%s-%s) %s " % (self.deviceName, self.volname, self.curdir)
    def do_info(self, p):
        """Dump the device_infos dictionary."""
        pprint(self.device_infos)
    def do_save(self, p):
        """Save device_infos (keys, passcode, ...) to a plist file."""
        print "Save device information plist to [%s]:" % self.savepath,
        path2 = raw_input()
        if path2: self.savepath = path2
        if os.path.exists(self.savepath):
            print "File already exists, overwrite ? [y/n]:",
            if raw_input() != "y":
                return
        plistlib.writePlist(self.device_infos, self.savepath)
    def do_system(self, p):
        """Switch to the system partition."""
        self.set_partition("system", self.system)
    def do_data(self, p):
        """Switch to the data partition."""
        self.set_partition("data", self.data)
    def do_pix(self, p):
        """Shortcut: jump to the camera roll folder."""
        self.do_data("")
        self.do_cd("/mobile/Media/DCIM/100APPLE")
    def do_keychain(self, p):
        """Extract keychain-2.db to the cwd and print all decrypted items."""
        self.data.readFile("/Keychains/keychain-2.db")
        keychain = keychain_load("keychain-2.db", self.data.keybag, self.image.device_infos["key835"].decode("hex"))
        keychain.print_all(False)
    def do_keychain_cert(self, p):
        """keychain_cert ID [filename] : export certificate #ID."""
        t = p.split()
        id = int(t[0])
        if len(t) == 2: filename = t[1]
        else: filename = ""
        keychain = keychain_load("keychain-2.db", self.data.keybag, self.image.device_infos["key835"].decode("hex"))
        keychain.cert(id, filename)
    def do_keychain_key(self, p):
        """keychain_key ID [filename] : export key #ID."""
        t = p.split()
        id = int(t[0])
        if len(t) == 2: filename = t[1]
        else: filename = ""
        keychain = keychain_load("keychain-2.db", self.data.keybag, self.image.device_infos["key835"].decode("hex"))
        keychain.key(id, filename)
    def do_exit(self, p):
        """Leave the shell (returning True stops the Cmd loop)."""
        return True
    def do_quit(self, p):
        """Alias for exit."""
        return self.do_exit(p)
    def do_reboot(self, p):
        """Reboot the device through the ramdisk tool, then exit."""
        if not self.rdisk:
            self.rdisk = RamdiskToolClient.get()
        self.rdisk.reboot()
        return self.do_exit(p)
    def do_pwd(self, p):
        """Print the current directory."""
        print self.curdir
    def do_cd(self, p):
        """Change directory (relative or absolute); validates the target."""
        if len(p) == 0: p = "/"
        if not p.startswith("/"):
            new = self.curdir + p
        else:
            new = p
        if not p.endswith("/"): new = new + "/"
        d = self.volume.ls(new)
        if d != None:
            self.curdir = new
            self.prompt = "(%s-%s) %s " % (self.deviceName, self.volname, new)
        else:
            print "%s not found/is not a directory" % new
    def get_path(self, p):
        # Resolve a possibly-relative path against the current directory.
        path = p
        if not path.startswith("/"):
            path = self.curdir + path
        return path
    def _complete(self, text, line, begidx, endidx):
        # readline completer: list entries of the directory being typed.
        filename = text.split("/")[-1]
        dirname = "/".join(text.split("/")[:-1])
        if text.startswith("/"):
            contents = self.volume.ls(dirname)
        else:
            contents = self.volume.ls(self.curdir + dirname)
        if not contents:
            return []
        if dirname != "" and not dirname.endswith("/"):
            dirname += "/"
        res = [dirname + x for x in contents.keys() if x.startswith(filename)]
        return res
    #TODO if size==0 check if compressed
    def do_ls(self, p):
        """List the current directory: name, size, creation date and data
        protection class of each entry."""
        dirDict = self.volume.ls((self.curdir + "/" + p).replace("//","/"))
        if not dirDict:
            return
        for name in sorted(dirDict.keys()):
            size = ""
            protection_class = ""
            record = dirDict[name]
            # Only file records carry a fileID / data fork.
            if hasattr(record, "fileID"):
                size = sizeof_fmt(record.dataFork.logicalSize)
                cprotect = self.volume.getXattr(record.fileID, "com.apple.system.cprotect")
                if cprotect:
                    protection_class = PROTECTION_CLASSES[struct.unpack("<L", cprotect[8:12])[0]]
            print "%s\t%s\t%s\t%s" % (name[:30].ljust(30), size.ljust(10), hfs_date(record.createDate), protection_class)
    def do_undelete(self, p):
        """Carve the NAND for deleted files on the data partition."""
        if not self.data.keybag.unlocked:
            print "Warning, keybag is not unlocked, some files will be inaccessible"
        if not self.carver:
            self.carver = NANDCarver(self.data, self.image)
        # Disabled experiment: restrict carving to the current folder.
        if False:#len(p):
            z = self.volume.catalogTree.getLBAsHax()
            v = self.volume.getFileRecordForPath(self.curdir)
            folderId = v.folderID
            f = lambda k,v: k.parentID == folderId
        else:
            z = None
            f = None
        self.carver.carveDeletedFiles_fast(z, f)
        #self.carver.carveDeleteFiles_slow(z, f)
    def do_xattr(self, p):
        """Print all extended attributes of a file (values hex-encoded)."""
        xattr = self.volume.listXattrs(self.get_path(p))
        if not xattr:
            return
        for name, value in xattr.items():
            print name, value.encode("hex")
    def do_protected_files(self, p):
        """List files whose protection class makes them locked/unreadable."""
        self.data.list_protected_files()
    def do_cprotect(self, p):
        """Show a file's cprotect xattr, protection class, and (when the
        keybag allows) its unwrapped file key."""
        id = self.volume.getFileIDByPath(self.get_path(p))
        if not id:
            return
        cprotect = self.volume.getXattr(id, "com.apple.system.cprotect")
        if not cprotect:
            return
        cp = cprotect_xattr.parse(cprotect)
        print cp
        print "Protection class %d => %s" % (cp.persistent_class, PROTECTION_CLASSES.get(cp.persistent_class))
        if not cp.persistent_key:
            return
        fk = self.volume.getFileKeyForCprotect(cprotect)
        if fk:
            print "Unwrapped file key : %s" % fk.encode("hex")
        else:
            print "Cannot decrypt file key"
    def do_open(self, p):
        """Extract a file to the cwd and open it with the OS default app.

        NOTE(review): os.startfile only exists on Windows.
        """
        path = self.get_path(p)
        if self.volume.readFile(path):
            os.startfile(os.path.basename(path))
    def do_xxd(self, p):
        """xxd PATH [N] : hexdump a file (optionally only the first N bytes)."""
        t = p.split()
        path = self.get_path(t[0])
        data = self.volume.readFile(path, returnString=True)
        if not data:
            return
        if len(t) > 1:
            hexdump(data[:int(t[1])])
        else:
            hexdump(data)
    def do_effaceable(self, p):
        """Print all lockers from the effaceable storage area."""
        print "Effaceable Lockers"
        for k,v in self.image.lockers.lockers.items():
            print "%s: %s" % (k, v.encode("hex"))
    def do_BAG1(self, p):
        """Print the BAG1 locker (system keybag IV + key)."""
        print "BAG1 locker from effaceable storage"
        bag1 = self.image.lockers.get("BAG1")
        hexdump(bag1)
        print "IV:", bag1[4:20].encode("hex")
        print "Key:", bag1[20:].encode("hex")
    def do_keybag(self, p):
        """Print the keybag's class keys."""
        self.data.keybag.printClassKeys()
    def do_plist(self, p):
        """Pretty-print a plist from the volume (or a local file as a
        fallback); recognizes system keybag files by their _MKBIV key."""
        d = None
        data = self.volume.readFile(self.get_path(p), returnString=True)
        if data:
            d = parsePlist(data)
            pprint(d)
        else:
            try:
                d = readPlist(p)
                if d: pprint(d)
            except:
                pass
        if d and d.has_key("_MKBIV"):
            print "=>System keybag file"
            print "_MKBPAYLOAD: encrypted"
            print "_MKBIV: %s" % d["_MKBIV"].data.encode("hex")
            print "_MKBWIPEID: 0x%x (%s)" % (d["_MKBWIPEID"], ("%x"%(d["_MKBWIPEID"])).decode("hex"))
    def do_bruteforce(self, p):
        """Bruteforce the passcode; on success save the updated infos."""
        if bruteforcePasscode(self.image.device_infos, self.data):
            print "Keybag state: %slocked" % (int(self.data.keybag.unlocked) * "un")
            self.do_save("")
    def do_ptable(self, p):
        """Print the block-device partition table."""
        pt = self.image.getPartitionTable()
        print "Block device partition table"
        print "".join(map(lambda x:x.ljust(12), ["Index", "Name", "Start LBA", "End LBA", "Size"]))
        for i in xrange(len(pt)):
            p = pt[i]
            print "".join(map(lambda x:str(x).ljust(12), [i, p.name, p.first_lba, p.last_lba, sizeof_fmt((p.last_lba - p.first_lba)*self.image.pageSize)]))
    def do_nand_dump(self, p):
        """nand_dump FILE : dump the raw NAND to FILE."""
        if len(p)==0:
            print "Usage: nand_dump my_nand.bin"
            return
        self.image.dump(p)
    def do_dd(self, p):
        """dd FILE : dump the current volume's block device to FILE."""
        if len(p)==0:
            print "Usage: dd output_file.dmg"
            return
        self.volume.bdev.dumpToFile(p.split()[0])
    def do_img3(self, p):
        """Extract all Img3 firmware images from the NAND."""
        self.image.extract_img3s()
    def do_shsh(self, p):
        """Extract SHSH blobs from the NAND."""
        self.image.extract_shsh()
    def do_debug(self,p):
        """Drop into an embedded IPython shell for ad-hoc inspection."""
        from IPython.Shell import IPShellEmbed
        ipshell = IPShellEmbed()
        ipshell(local_ns=locals())
def main():
    """Entry point: load device information from a plist (offline NAND
    image) or from the on-device ramdisk tool, then start the shell."""
    parser = OptionParser(usage="%prog [options] nand_image.bin device_infos.plist")
    (options, args) = parser.parse_args()
    if len(args) >= 2:
        plistname = args[1]
        nandimagename = args[0]
        device_infos = plistlib.readPlist(plistname)
        print "Loading device information from %s" % plistname
    else:
        # No arguments: talk to the ramdisk tool running on the device.
        nandimagename ="remote"
        client = RamdiskToolClient.get()
        device_infos = client.device_infos
    print_device_infos(device_infos)
    image = NAND(nandimagename, device_infos)
    ExaminerShell(image).cmdloop("")

if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,224 @@
#!/usr/bin/python
import plistlib
import zipfile
import struct
import sys
from optparse import OptionParser
from Crypto.Cipher import AES
from util.lzss import decompress_lzss
# Board identifier -> product identifier; used to build the
# "<product>_<version>_<build>" key looked up in redsn0w's Keys.plist.
devices = {"n82ap": "iPhone1,2",
           "n88ap": "iPhone2,1",
           "n90ap": "iPhone3,1",
           "n90bap": "iPhone3,2",
           "n92ap": "iPhone3,3",
           "n18ap": "iPod3,1",
           "n81ap": "iPod4,1",
           "k48ap": "iPad1,1",
           "n72ap": "iPod2,1",
           }
h=lambda x:x.replace(" ","").decode("hex")
#thx to 0x56
patchs_ios6 = {
"IOAESAccelerator enable UID" : (h("B0 F5 FA 6F 00 F0 92 80"), h("B0 F5 FA 6F 00 20 00 20")),
"_PE_i_can_has_debugger" : (h("80 B1 43 F2 BE 01 C0 F2"), h("01 20 70 47 BE 01 C0 F2")),
}
#https://github.com/comex/datautils0/blob/master/make_kernel_patchfile.c
patchs_ios5 = {
"CSED" : (h("df f8 88 33 1d ee 90 0f a2 6a 1b 68"), h("df f8 88 33 1d ee 90 0f a2 6a 01 23")),
"AMFI" : (h("D0 47 01 21 40 B1 13 35"), h("00 20 01 21 40 B1 13 35")),
"_PE_i_can_has_debugger" : (h("38 B1 05 49 09 68 00 29"), h("01 20 70 47 09 68 00 29")),
"task_for_pid_0" : (h("00 21 02 91 ba f1 00 0f 01 91 06 d1 02 a8"), h("00 21 02 91 ba f1 00 0f 01 91 06 e0 02 a8")),
"IOAESAccelerator enable UID" : (h("67 D0 40 F6"), h("00 20 40 F6")),
#not stritcly required, useful for testing
"getxattr system": ("com.apple.system.\x00", "com.apple.aaaaaa.\x00"),
"IOAES gid": (h("40 46 D4 F8 54 43 A0 47"), h("40 46 D4 F8 43 A0 00 20")),
#HAX to fit into the 40 char boot-args (redsn0w 0.9.10)
"nand-disable-driver": ("nand-disable-driver\x00", "nand-disable\x00\x00\x00\x00\x00\x00\x00\x00")
}
patchs_ios4 = {
"NAND_epoch" : ("\x90\x47\x83\x45", "\x90\x47\x00\x20"),
"CSED" : ("\x00\x00\x00\x00\x01\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00", "\x01\x00\x00\x00\x01\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00"),
"AMFI" : ("\x01\xD1\x01\x30\x04\xE0\x02\xDB", "\x00\x20\x01\x30\x04\xE0\x02\xDB"),
"_PE_i_can_has_debugger" : (h("48 B1 06 4A 13 68 13 B9"), h("01 20 70 47 13 68 13 B9")),
"IOAESAccelerator enable UID" : ("\x56\xD0\x40\xF6", "\x00\x00\x40\xF6"),
"getxattr system": ("com.apple.system.\x00", "com.apple.aaaaaa.\x00"),
}
patchs_armv6 = {
"NAND_epoch" : (h("00 00 5B E1 0E 00 00 0A"), h("00 00 5B E1 0E 00 00 EA")),
"CSED" : (h("00 00 00 00 01 00 00 00 80 00 00 00 00 00 00 00"), h("01 00 00 00 01 00 00 00 80 00 00 00 00 00 00 00")),
"AMFI" : (h("00 00 00 0A 00 40 A0 E3 04 00 A0 E1 90 80 BD E8"), h("00 00 00 0A 00 40 A0 E3 01 00 A0 E3 90 80 BD E8")),
"_PE_i_can_has_debugger" : (h("00 28 0B D0 07 4A 13 68 00 2B 02 D1 03 60 10 68"), h("01 20 70 47 07 4A 13 68 00 2B 02 D1 03 60 10 68")),
"IOAESAccelerator enable UID" : (h("5D D0 36 4B 9A 42"), h("00 20 36 4B 9A 42")),
"IOAES gid": (h("FA 23 9B 00 9A 42 05 D1"), h("00 20 00 20 9A 42 05 D1")),
"nand-disable-driver": ("nand-disable-driver\x00", "nand-disable\x00\x00\x00\x00\x00\x00\x00\x00"),
}
patchs_ios4_fixnand = {
"Please reboot => jump to prepare signature": (h("B0 47 DF F8 E8 04 F3 E1"), h("B0 47 DF F8 E8 04 1D E0")),
"prepare signature => jump to write signature": (h("10 43 18 60 DF F8 AC 04"), h("10 43 18 60 05 E1 00 20")),
"check write ok => infinite loop" : (h("A3 48 B0 47 01 24"), h("A3 48 B0 47 FE E7"))
}
#grab keys from redsn0w Keys.plist
class IPSWkeys(object):
    """Lookup table of firmware decryption keys loaded from redsn0w's
    Keys.plist, selected for the build described by an IPSW BuildManifest."""
    def __init__(self, manifest):
        self.keys = {}
        buildi = manifest["BuildIdentities"][0]
        dc = buildi["Info"]["DeviceClass"]
        # redsn0w indexes keys by "<device>_<version>_<build>"
        build = "%s_%s_%s" % (devices.get(dc,dc), manifest["ProductVersion"], manifest["ProductBuildVersion"])
        try:
            rs = plistlib.readPlist("Keys.plist")
        except Exception:
            raise Exception("Get Keys.plist from redsn0w and place it in the current directory")
        for k in rs["Keys"]:
            if k["Build"] == build:
                self.keys = k
                break
    def getKeyIV(self, filename):
        """Return the (key, iv) hex strings for a firmware file name, or
        (None, None) when no entry exists for this build/file."""
        if filename not in self.keys:
            return None, None
        k = self.keys[filename]
        return k.get("Key",""), k.get("IV","")
def decryptImg3(blob, key, iv):
    """Extract and AES-CBC decrypt the DATA section of an Img3 container.

    blob : raw Img3 file contents, key/iv : raw AES key and IV bytes.
    Returns the decrypted payload truncated to its real (unpadded) length.
    """
    assert blob[:4] == "3gmI", "Img3 magic tag"
    # Img3 layout: 20-byte header, then tags of the form
    # magic(4, reversed) | total length(4, incl. 12-byte tag header) | real length(4) | payload.
    # The original code scanned byte-by-byte (its i += size was dead inside a
    # for loop); walk tag-to-tag instead so payload bytes cannot be
    # misinterpreted as a tag header.
    data = ""
    real_size = 0
    i = 20
    while i + 12 <= len(blob):
        tag = blob[i:i+4]
        size, real_size = struct.unpack("<LL", blob[i+4:i+12])
        if tag[::-1] == "DATA":
            assert size >= real_size, "Img3 length check"
            data = blob[i+12:i+size]
            break
        if size == 0:
            break  # malformed tag : avoid looping forever
        i += size  # 'size' already includes the 12-byte tag header
    return AES.new(key, AES.MODE_CBC, iv).decrypt(data)[:real_size]
def main(ipswname, options):
    """Decrypt + patch the kernelcache from an IPSW and prepare a custom
    forensic ramdisk build script for the same device.

    ipswname : path to the IPSW zip archive
    options  : OptionParser result (only .fixnand is used)
    Writes <kernel>.patched, myramdisk_<devclass>.dmg and
    make_ramdisk_<devclass>.sh to the current directory.
    """
    ipsw = zipfile.ZipFile(ipswname)
    manifest = plistlib.readPlistFromString(ipsw.read("BuildManifest.plist"))
    # the first build identity names the kernelcache and the device class
    kernelname = manifest["BuildIdentities"][0]["Manifest"]["KernelCache"]["Info"]["Path"]
    devclass = manifest["BuildIdentities"][0]["Info"]["DeviceClass"]
    kernel = ipsw.read(kernelname)
    keys = IPSWkeys(manifest)
    key,iv = keys.getKeyIV(kernelname)
    if key == None:
        print "No keys found for kernel"
        return
    print "Decrypting %s" % kernelname
    kernel = decryptImg3(kernel, key.decode("hex"), iv.decode("hex"))
    # "complzss" : the kernelcache is LZSS compressed after Img3 decryption
    assert kernel.startswith("complzss"), "Decrypted kernelcache does not start with \"complzss\" => bad key/iv ?"
    print "Unpacking ..."
    kernel = decompress_lzss(kernel)
    # 0xFEEDFACE : 32-bit Mach-O magic (little-endian)
    assert kernel.startswith("\xCE\xFA\xED\xFE"), "Decompressed kernelcache does not start with 0xFEEDFACE"
    # select the patch set for this device / iOS version (default iOS 5)
    patchs = patchs_ios5
    if devclass in ["n82ap", "n72ap"]:
        print "Using ARMv6 kernel patches"
        patchs = patchs_armv6
    elif manifest["ProductVersion"].startswith("4."):
        print "Using iOS 4 kernel patches"
        patchs = patchs_ios4
    elif manifest["ProductVersion"].startswith("6."):
        print "Using iOS 6 kernel patches"
        patchs = patchs_ios6
    if options.fixnand:
        if patchs != patchs_ios4:
            print "FAIL : use --fixnand with iOS 4.x IPSW"
            return
        patchs.update(patchs_ios4_fixnand)
        kernelname = "fix_nand_" + kernelname
        print "WARNING : only use this kernel to fix NAND epoch brick"
    # apply each patch only when its search pattern is unique in the image
    for p in patchs:
        print "Doing %s patch" % p
        s, r = patchs[p]
        c = kernel.count(s)
        if c != 1:
            print "=> FAIL, count=%d, do not boot that kernel it wont work" % c
        else:
            kernel = kernel.replace(s,r)
    outkernel = "%s.patched" % kernelname
    open(outkernel, "wb").write(kernel)
    print "Patched kernel written to %s" % outkernel
    # now decrypt the restore ramdisk to use as a base for the custom one
    ramdiskname = manifest["BuildIdentities"][0]["Manifest"]["RestoreRamDisk"]["Info"]["Path"]
    key,iv = keys.getKeyIV("Ramdisk")
    ramdisk = ipsw.read(ramdiskname)
    print "Decrypting %s" % ramdiskname
    ramdisk = decryptImg3(ramdisk, key.decode("hex"), iv.decode("hex"))
    # "H+" at 0x400 : HFS+ volume header signature
    assert ramdisk[0x400:0x402] == "H+", "H+ magic not found in decrypted ramdisk => bad key/iv ?"
    customramdisk = "myramdisk_%s.dmg" % devclass
    f = open(customramdisk, "wb")
    f.write(ramdisk)
    f.close()
    if manifest["ProductVersion"].startswith("6."):
        print "Run ./build_ramdisk_ios6.sh %s" % customramdisk
        print "Then redsn0w -i %s -r %s -k %s -a \"-v rd=md0 amfi=0xff cs_enforcement_disable=1\"" % (ipswname, customramdisk, outkernel)
        return
    build_cmd = "./build_ramdisk.sh %s %s %s %s %s" % (ipswname, ramdiskname, key, iv, customramdisk)
    rs_cmd = "redsn0w -i %s -r %s -k %s" % (ipswname, customramdisk, outkernel)
    # helper shell script template (string content is written to disk as-is)
    rdisk_script="""#!/bin/sh
for VER in 4.2 4.3 5.0 5.1 6.0
do
if [ -f "/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS$VER.sdk/System/Library/Frameworks/IOKit.framework/IOKit" ];
then
SDKVER=$VER
echo "Found iOS SDK $SDKVER"
break
fi
done
if [ "$SDKVER" == "" ]; then
echo "iOS SDK not found"
exit
fi
SDKVER=$SDKVER make -C ramdisk_tools
%s
if [ "$?" == "0" ]
then
echo "You can boot the ramdisk using the following command (fix paths)"
echo "%s"
echo "Add -a \\"-v rd=md0 nand-disable=1\\" for nand dump/read only access"
fi
""" % (build_cmd, rs_cmd)
    scriptname="make_ramdisk_%s.sh" % devclass
    f=open(scriptname, "wb")
    f.write(rdisk_script)
    f.close()
    print "Created script %s, you can use it to (re)build the ramdisk"% scriptname
# command line : kernel_patcher.py [-f/--fixnand] IPSW
if __name__ == "__main__":
    parser = OptionParser(usage="%prog [options] IPSW")
    parser.add_option("-f", "--fixnand",
                action="store_true", dest="fixnand", default=False,
                help="Apply NAND epoch fix kernel patches")
    (options, args) = parser.parse_args()
    if len(args) < 1:
        parser.print_help()
    else:
        main(args[0], options)

View File

@@ -0,0 +1,12 @@
import sqlite3
from keychain3 import Keychain3
from keychain4 import Keychain4
def keychain_load(filename, keybag, key835):
    """Open a keychain database and return the reader matching its schema
    version (tversion table) : Keychain3 for iOS 3.x, Keychain4 for 4+.

    Raises Exception for unknown versions.  The probe connection is closed
    explicitly (the original leaked it).
    """
    conn = sqlite3.connect(filename)
    try:
        version = conn.execute("SELECT version FROM tversion").fetchone()[0]
    finally:
        conn.close()
    #print "Keychain version : %d" % version
    if version == 3:
        return Keychain3(filename, key835)
    elif version >= 4:
        return Keychain4(filename, keybag)
    raise Exception("Unknown keychain version %d" % version)

View File

@@ -0,0 +1,237 @@
from store import PlistKeychain, SQLiteKeychain
from util import write_file
from util.asciitables import print_table
from util.bplist import BPlistReader
from util.cert import RSA_KEY_DER_to_PEM, CERT_DER_to_PEM
import M2Crypto
import hashlib
import plistlib
import sqlite3
import string
import struct
# keychain item protection class ids -> kSecAttrAccessible constant names
KSECATTRACCESSIBLE = {
    6: "kSecAttrAccessibleWhenUnlocked",
    7: "kSecAttrAccessibleAfterFirstUnlock",
    8: "kSecAttrAccessibleAlways",
    9: "kSecAttrAccessibleWhenUnlockedThisDeviceOnly",
    10: "kSecAttrAccessibleAfterFirstUnlockThisDeviceOnly",
    11: "kSecAttrAccessibleAlwaysThisDeviceOnly"
}
# characters considered displayable by Keychain.sanitize()
printset = set(string.printable)
def render_password(p):
    """Render one password item as a semicolon-separated line for the CSV
    export : "server:port;account;data" for internet passwords, otherwise
    "service;account;data".

    Binary plist payloads are written out to a standalone .plist file and
    the file name is rendered in place of the raw bytes.
    """
    data = p["data"]
    if data != None and data.startswith("bplist") and data.find("\x00") != -1:
        # payload is a binary plist : dump it as XML next to the CSV
        pl = BPlistReader.plistWithString(p["data"])
        filename = "%s_%s_%d.plist" % (p["svce"],p["acct"],p["rowid"])
        plistlib.writePlist(pl, filename)
        #write_file("bin_"+filename, p["data"])
        data = filename
    if "srvr" in p:
        return "%s:%d;%s;%s" % (p["srvr"],p["port"],p["acct"],data)
    else:
        return "%s;%s;%s" % (p["svce"],p["acct"],data)
class Keychain(object):
    """Base class for iOS keychain readers.

    Wraps a backing store (SQLite db or binary plist) and lazily caches the
    decrypted items of the four tables (genp/inet/cert/keys).  Subclasses
    override decrypt_data / decrypt_item for the per-version encryption.
    """
    def __init__(self, filename):
        # sniff the container format from the first bytes of the file
        magic = open(filename, "rb").read(16)
        if magic.startswith("SQLite"):
            self.store = SQLiteKeychain(filename)
        elif magic.startswith("bplist"):
            self.store = PlistKeychain(filename)
        else:
            raise Exception("Unknown keychain format for %s" % filename)
        # when True, print_all masks secrets with '*'
        self.bsanitize = True
        # per-table cache of decrypted items, filled on first access
        self.items = {"genp": None, "inet": None, "cert": None, "keys": None}
    def decrypt_data(self, data):
        return data #override this method
    def decrypt_item(self, res):
        # items that fail to decrypt become {} and are filtered out later
        res["data"] = self.decrypt_data(res["data"])
        if not res["data"]:
            return {}
        return res
    def get_items(self, table):
        """Return the decrypted items of one table, decrypting on first call."""
        if self.items[table]:
            return self.items[table]
        self.items[table] = filter(lambda x:x!={}, map(self.decrypt_item, self.store.get_items(table)))
        return self.items[table]
    def get_passwords(self):
        return self.get_items("genp")
    def get_inet_passwords(self):
        return self.get_items("inet")
    def get_keys(self):
        return self.get_items("keys")
    def get_cert(self):
        return self.get_items("cert")
    def get_certs(self):
        """Return ({name: X509 cert}, {name: PEM private key}), pairing each
        certificate with its private key via access group + public key hash."""
        certs = {}
        pkeys = {}
        keys = self.get_keys()
        for row in self.get_cert():
            cert = M2Crypto.X509.load_cert_der_string(row["data"])
            subject = cert.get_subject().as_text()
            common_name = cert.get_subject().get_entries_by_nid(M2Crypto.X509.X509_Name.nid['CN'])
            if len(common_name):
                subject = str(common_name[0].get_data())
            else:
                subject = "cn_unknown_%d" % row["rowid"]
            certs[subject+ "_%s" % row["agrp"]] = cert
            #print subject
            #print "Access :\t" + KSECATTRACCESSIBLE.get(row["clas"])
            # private key match : same access group, key label == cert pkhh
            for k in keys:
                if k["agrp"] == row["agrp"] and k["klbl"] == row["pkhh"]:
                    pkey_der = k["data"]
                    pkey_der = RSA_KEY_DER_to_PEM(pkey_der)
                    pkeys[subject + "_%s" % row["agrp"]] = pkey_der
                    break
        return certs, pkeys
    def save_passwords(self):
        """Export generic + internet passwords to keychain.csv."""
        passwords = "\n".join(map(render_password, self.get_passwords()))
        inetpasswords = "\n".join(map(render_password, self.get_inet_passwords()))
        print "Writing passwords to keychain.csv"
        write_file("keychain.csv", "Passwords;;\n"+passwords+"\nInternet passwords;;\n"+ inetpasswords)
    def save_certs_keys(self):
        """Save every certificate (.crt) and private key (.key) as PEM files."""
        certs, pkeys = self.get_certs()
        for c in certs:
            filename = c + ".crt"
            print "Saving certificate %s" % filename
            certs[c].save_pem(filename)
        for k in pkeys:
            filename = k + ".key"
            print "Saving key %s" % filename
            write_file(filename, pkeys[k])
    def sanitize(self, pw):
        """Prepare a secret for display : hex-encode binary data, then mask
        all but the first two characters when bsanitize is on."""
        if pw.startswith("bplist"):
            return "<binary plist data>"
        elif not set(pw).issubset(printset):
            pw = ">"+ pw.encode("hex")
            #pw = "<binary data> : " + pw.encode("hex")
        if self.bsanitize:
            return pw[:2] + ("*" * (len(pw) - 2))
        return pw
    def print_all(self, sanitize=True):
        """Pretty-print the four tables; secrets are masked unless
        sanitize=False."""
        self.bsanitize = sanitize
        headers = ["Service", "Account", "Data", "Access group", "Protection class"]
        rows = []
        for p in self.get_passwords():
            row = [p.get("svce","?"),
                   str(p.get("acct","?"))[:40],
                   self.sanitize(p.get("data","?"))[:20],
                   p.get("agrp","?"),
                   KSECATTRACCESSIBLE.get(p["clas"])[18:]]
            rows.append(row)
        print_table("Passwords", headers, rows)
        headers = ["Server", "Account", "Data", "Access group", "Protection class"]
        rows = []
        for p in self.get_inet_passwords():
            addr = "?"
            if p.has_key("srvr"):
                addr = p["srvr"] + ":" + str(p["port"])
            row = [addr,
                   str(p.get("acct","?")),
                   self.sanitize(p.get("data","?"))[:20],
                   p.get("agrp","?"),
                   KSECATTRACCESSIBLE.get(p["clas"])[18:]]
            rows.append(row)
        print_table("Internet Passwords", headers, rows)
        headers = ["Id", "Common Name", "Access group", "Protection class"]
        rows = []
        # c maps sha1(public key hash)+agrp -> subject, reused for the keys table
        c = {}
        for row in self.get_cert():
            subject = "?"
            if row.has_key("data"):
                cert = M2Crypto.X509.load_cert_der_string(row["data"])
                subject = cert.get_subject().as_text()
                common_name = cert.get_subject().get_entries_by_nid(M2Crypto.X509.X509_Name.nid['CN'])
                if len(common_name):
                    subject = str(common_name[0].get_data())
                else:
                    subject = "cn_unknown_%d" % row["rowid"]
                c[hashlib.sha1(str(row["pkhh"])).hexdigest() + row["agrp"]] = subject
            row = [str(row["rowid"]),
                   subject[:81],
                   row.get("agrp","?")[:31],
                   KSECATTRACCESSIBLE.get(row["clas"])[18:]
                   ]
            rows.append(row)
        print_table("Certificates", headers, rows)
        headers = ["Id", "Label", "Common Name", "Access group", "Protection class"]
        rows = []
        for row in self.get_keys():
            subject = ""
            if row.has_key("klbl"):
                subject = c.get(hashlib.sha1(str(row["klbl"])).hexdigest() + row["agrp"], "")
            row = [str(row["rowid"]), row.get("labl", "?")[:30], subject[:39], row.get("agrp","?")[:31],
                   KSECATTRACCESSIBLE.get(row["clas"])[18:]]
            rows.append(row)
        print_table("Keys", headers, rows)
    def get_push_token(self):
        """Return the APNS push token (push.apple.com generic password)."""
        for p in self.get_passwords():
            if p["svce"] == "push.apple.com":
                return p["data"]
    def get_managed_configuration(self):
        """Return the com.apple.managedconfiguration plist (holds the
        passcode history), or None when not present."""
        for p in self.get_passwords():
            if p["acct"] == "Private" and p["svce"] == "com.apple.managedconfiguration" and p["agrp"] == "apple":
                return BPlistReader.plistWithString(p["data"])
    def _diff(self, older, res, func, key):
        # append items present in 'older' but missing from self into res[key]
        res.setdefault(key, [])
        current = func(self)
        for p in func(older):
            if not p in current and not p in res[key]:
                res[key].append(p)
    def diff(self, older, res):
        """Collect into res the items present in 'older' but not in this one."""
        self._diff(older, res, Keychain.get_passwords, "genp")
        self._diff(older, res, Keychain.get_inet_passwords, "inet")
        self._diff(older, res, Keychain.get_cert, "cert")
        self._diff(older, res, Keychain.get_keys, "keys")
    def cert(self, rowid, filename=""):
        """Print certificate 'rowid' as text, optionally saving it as PEM."""
        for row in self.get_cert():
            if row["rowid"] == rowid:
                blob = CERT_DER_to_PEM(row["data"])
                if filename:
                    write_file(filename, blob)
                cert = M2Crypto.X509.load_cert_der_string(row["data"])
                print cert.as_text()
                return
    def key(self, rowid, filename=""):
        """Print private key 'rowid' as PEM, optionally saving it to a file."""
        for row in self.get_keys():
            if row["rowid"] == rowid:
                blob = RSA_KEY_DER_to_PEM(row["data"])
                if filename:
                    write_file(filename, blob)
                #k = M2Crypto.RSA.load_key_string(blob)
                print blob
                return

View File

@@ -0,0 +1,44 @@
from keychain import Keychain
from crypto.aes import AESdecryptCBC, AESencryptCBC
import hashlib
class Keychain3(Keychain):
    """iOS 3.x keychain reader : each data column is
    iv(16) + AES128-CBC(key 0x835, data + sha1(data))."""
    def __init__(self, filename, key835=None):
        Keychain.__init__(self, filename)
        # device key 0x835, required to decrypt anything
        self.key835 = key835
    def decrypt_data(self, data):
        if data == None:
            return ""
        data = str(data)
        if not self.key835:
            print "Key 835 not availaible"
            return ""
        # first 16 bytes of the column are the IV
        data = AESdecryptCBC(data[16:], self.key835, data[:16], padding=True)
        #data_column = iv + AES128_K835(iv, data + sha1(data))
        # trailing 20 bytes are sha1(plaintext) : verifies the key was right
        if hashlib.sha1(data[:-20]).digest() != data[-20:]:
            print "data field hash mismatch : bad key ?"
            return "ERROR decrypting data : bad key ?"
        return data[:-20]
    def change_key835(self, newkey):
        """Re-encrypt every data column with newkey.
        NOTE(review): uses self.conn, which this class never defines (the
        connection lives inside self.store) -- looks broken since the store
        refactor, confirm before use."""
        tables = {"genp": "SELECT rowid, data FROM genp",
                  "inet": "SELECT rowid, data FROM inet",
                  "cert": "SELECT rowid, data FROM cert",
                  "keys": "SELECT rowid, data FROM keys"}
        for t in tables.keys():
            for row in self.conn.execute(tables[t]):
                rowid = row["rowid"]
                data = str(row["data"])
                iv = data[:16]
                data = AESdecryptCBC(data[16:], self.key835, iv)
                data = AESencryptCBC(data, newkey, iv)
                data = iv + data
                data = buffer(data)
                self.conn.execute("UPDATE %s SET data=? WHERE rowid=?" % t, (data, rowid))
        self.conn.commit()

View File

@@ -0,0 +1,92 @@
from crypto.aes import AESdecryptCBC
import struct
"""
iOS 4 keychain-2.db data column format
version 0x00000000
key class 0x00000008
kSecAttrAccessibleWhenUnlocked 6
kSecAttrAccessibleAfterFirstUnlock 7
kSecAttrAccessibleAlways 8
kSecAttrAccessibleWhenUnlockedThisDeviceOnly 9
kSecAttrAccessibleAfterFirstUnlockThisDeviceOnly 10
kSecAttrAccessibleAlwaysThisDeviceOnly 11
wrapped AES256 key 0x28 bytes (passed to kAppleKeyStoreKeyUnwrap)
encrypted data (AES 256 CBC zero IV)
"""
from keychain import Keychain
from crypto.gcm import gcm_decrypt
from util.bplist import BPlistReader
# keychain item protection class ids -> kSecAttrAccessible constant names
# (same table also exists in keychain.py)
KSECATTRACCESSIBLE = {
    6: "kSecAttrAccessibleWhenUnlocked",
    7: "kSecAttrAccessibleAfterFirstUnlock",
    8: "kSecAttrAccessibleAlways",
    9: "kSecAttrAccessibleWhenUnlockedThisDeviceOnly",
    10: "kSecAttrAccessibleAfterFirstUnlockThisDeviceOnly",
    11: "kSecAttrAccessibleAlwaysThisDeviceOnly"
}
class Keychain4(Keychain):
    """iOS 4+ keychain reader.

    Data columns start with version (LE32) + protection class (LE32) :
      version 0 : 40-byte wrapped AES256 key, then AES256-CBC (zero IV) data
      version 2 : length-prefixed wrapped key, then AES-GCM binary plist
                  (last 16 bytes of the blob are the GCM tag)
    Per-item keys are unwrapped through the keybag class keys.
    """
    def __init__(self, filename, keybag):
        if not keybag.unlocked:
            print("Keychain object created with locked keybag, some items won't be decrypted")
        Keychain.__init__(self, filename)
        self.keybag = keybag
    def decrypt_item(self, row):
        version, clas = struct.unpack("<LL", row["data"][0:8])
        if self.keybag.isBackupKeybag():
            # ThisDeviceOnly classes (9-11) still need the device key,
            # even when reading a backup keybag
            if clas >= 9 and not self.keybag.deviceKey:
                return {}
        if version >= 2:
            # v2 blobs decrypt to a dict of item attributes
            item = self.decrypt_blob(row["data"])
            if not item:
                return {"clas": clas, "rowid": row["rowid"]}
            if "v_Data" in item:
                item["data"] = item["v_Data"].data
            else:
                item["data"] = ""
            item["rowid"] = row["rowid"]
            item["clas"] = clas
            return item
        row["clas"] = clas
        return Keychain.decrypt_item(self, row)
    def decrypt_data(self, data):
        data = self.decrypt_blob(data)
        if type(data) == dict:
            return data["v_Data"].data
        return data
    def decrypt_blob(self, blob):
        """Decrypt one data blob.  Returns "" for NULL input, None on
        failure, a str (version 0) or an attribute dict (version 2)."""
        if blob == None:
            return ""
        if len(blob) < 48:
            print("keychain blob length must be >= 48")
            return
        version, clas = struct.unpack("<LL",blob[0:8])
        self.clas = clas
        if version == 0:
            wrappedkey = blob[8:8+40]
            encrypted_data = blob[48:]
        elif version == 2:
            l = struct.unpack("<L",blob[8:12])[0]
            wrappedkey = blob[12:12+l]
            encrypted_data = blob[12+l:-16]
        else:
            raise Exception("unknown keychain verson ", version)
        unwrappedkey = self.keybag.unwrapKeyForClass(clas, wrappedkey, False)
        if not unwrappedkey:
            return
        if version == 0:
            return AESdecryptCBC(encrypted_data, unwrappedkey, padding=True)
        elif version == 2:
            binaryplist = gcm_decrypt(unwrappedkey, "", encrypted_data, "", blob[-16:])
            return BPlistReader(binaryplist).parse()

View File

@@ -0,0 +1,27 @@
"""
0
1:MCSHA256DigestWithSalt
2:SecKeyFromPassphraseDataHMACSHA1
"""
import binascii
import hashlib
import plistlib

from crypto.PBKDF2 import PBKDF2
# fixed salt used by hash method 1 (MCSHA256DigestWithSalt)
SALT1 = binascii.unhexlify("F92F024CA2CB9754")
# passcode hashing functions indexed by the plist "hashMethod" field, each
# mapping (passcode, salt) -> the stored hash bytes.
# NB: method 1 must return the digest bytes -- the original returned the
# sha256 *object*, which never compares equal to the stored hash.
hashMethods={
    1: (lambda p,salt:hashlib.sha256(SALT1 + p).digest()),
    2: (lambda p,salt:PBKDF2(p, salt, iterations=1000).read(20))
}
def bruteforce_old_pass(h):
    """Brute-force one 4-digit passcode history entry.

    h : dict with 'salt' and 'hash' (plistlib.Data wrappers) and an integer
    'hashMethod' selecting an entry of hashMethods.
    Returns the passcode string, or None when the method is unknown or no
    4-digit passcode matches.
    """
    salt = h["salt"].data
    target = h["hash"].data
    f = hashMethods.get(h["hashMethod"])
    if not f:
        return None
    print("Bruteforcing hash %s (4 digits)" % binascii.hexlify(target))
    for i in xrange(10000):
        p = "%04d" % i  # i is already < 10000, no modulo needed
        if f(p, salt) == target:
            return p
    return None

View File

@@ -0,0 +1,56 @@
import plistlib
import sqlite3
import struct
from util import readPlist
class KeychainStore(object):
    """Common interface for keychain backing stores.

    Concrete stores (SQLite database, binary plist) override get_items and
    convertDict; the base implementations yield nothing / pass rows through
    unchanged."""
    def __init__(self):
        pass
    def convertDict(self, d):
        # identity conversion; subclasses normalize row values here
        return d
    def returnResults(self, r):
        # lazily normalize each raw row
        return (self.convertDict(row) for row in r)
    def get_items(self, table):
        # the abstract store holds no items
        return []
class SQLiteKeychain(KeychainStore):
    """Keychain store reading items from a keychain-2.db SQLite database."""
    # columns of interest for each keychain table
    _SQL = {
        "genp": "SELECT rowid, data, svce, acct, agrp FROM genp",
        "inet": "SELECT rowid, data, acct, srvr, port, agrp FROM inet",
        "cert": "SELECT rowid, data, pkhh, agrp FROM cert",
        "keys": "SELECT rowid, data, klbl, agrp FROM keys",
    }
    def __init__(self, filename):
        self.conn = sqlite3.connect(filename)
        # rows support access by column name
        self.conn.row_factory = sqlite3.Row
    def convertDict(self, row):
        # copy the sqlite3.Row into a plain dict, converting buffers to str
        item = dict(row)
        for key, value in item.items():
            if type(value) == buffer:
                item[key] = str(value)
        return item
    def get_items(self, table):
        return self.returnResults(self.conn.execute(self._SQL[table]))
class PlistKeychain(KeychainStore):
    """Keychain store reading items from a keychain-backup.plist file."""
    def __init__(self, filename):
        self.plist = readPlist(filename)
    def convertDict(self, d):
        # unwrap plistlib.Data values; v_Data becomes the item payload and
        # v_PersistentRef encodes the original row id
        for key, value in d.items():
            if not isinstance(value, plistlib.Data):
                continue
            if key == "v_Data":
                d["data"] = value.data
            elif key == "v_PersistentRef":
                #format tablename (4 chars) + rowid (64 bits)
                d["rowid"] = struct.unpack("<Q", value.data[-8:])[0]
            else:
                d[key] = value.data
        return d
    def get_items(self, table):
        return self.returnResults(self.plist.get(table, []))

View File

@@ -0,0 +1,72 @@
from optparse import OptionParser
from keystore.keybag import Keybag
from keychain import keychain_load
from keychain.managedconfiguration import bruteforce_old_pass
from util import readPlist
from keychain.keychain4 import Keychain4
import plistlib
def main():
    """Command line entry point : decrypt and dump an iOS keychain.

    args[0] : keychain database (keychain-2.db or keychain-backup.plist)
    args[1] : key file (device keys plist, or backup Manifest.plist)
    """
    parser = OptionParser(usage="%prog keychain.db/keychain-backup.plist keyfile.plist/Manifest.plist")
    parser.add_option("-d", "--display", dest="display", action="store_true", default=False,
                help="Show keychain items on stdout")
    parser.add_option("-s", "--sanitize", dest="sanitize", action="store_true", default=False,
                help="Hide secrets on stdout with ***")
    parser.add_option("-p", "--passwords", dest="passwords", action="store_true", default=False,
                help="Save generic & internet passwords as CSV file")
    parser.add_option("-c", "--certs", dest="certs", action="store_true", default=False,
                help="Extract certificates and keys")
    parser.add_option("-o", "--old", dest="oldpass", action="store_true", default=False,
                help="Bruteforce old passcodes")
    (options, args) = parser.parse_args()
    if len(args) < 2:
        parser.print_help()
        return
    p = readPlist(args[1])
    if p.has_key("BackupKeyBag"):
        # key file is a backup Manifest.plist : keybag comes from the backup
        deviceKey = None
        if p.has_key("key835"):
            deviceKey = p["key835"].decode("hex")
        else:
            if not p["IsEncrypted"]:
                print "This backup is not encrypted, without key 835 nothing in the keychain can be decrypted"
            print "If you have key835 for device %s enter it (in hex)" % p["Lockdown"]["UniqueDeviceID"]
            d = raw_input()
            if len(d) == 32:
                p["key835"] = d
                deviceKey = d.decode("hex")
                # cache the entered key inside the manifest for later runs
                plistlib.writePlist(p, args[1])
        kb = Keybag.createWithBackupManifest(p, p.get("password",""), deviceKey)
        if not kb:
            return
        k = Keychain4(args[0], kb)
    else:
        # key file is a device info plist produced by the ramdisk tools
        kb = Keybag.createWithPlist(p)
        k = keychain_load(args[0], kb, p["key835"].decode("hex"))
    if options.display:
        k.print_all(options.sanitize)
    if options.passwords:
        k.save_passwords()
    if options.certs:
        k.save_certs_keys()
    if options.oldpass:
        mc = k.get_managed_configuration()
        if not mc:
            print "Managed configuration not found"
            return
        print "Bruteforcing %d old passcodes" % len(mc.get("history",[]))
        for h in mc["history"]:
            p = bruteforce_old_pass(h)
            if p:
                print "Found : %s" % p
            else:
                print "Not Found"
# run the keychain tool when invoked as a script
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,69 @@
from construct import RepeatUntil
from construct.core import Struct, Union
from construct.macros import *
from crypto.aes import AESdecryptCBC
from crypto.aeswrap import AESUnwrap
from zipfile import crc32
import struct
# well-known locker tags (fourcc values)
Dkey = 0x446B6579
EMF = 0x454D4621
BAG1 = 0x42414731
DONE = 0x444f4e45 #locker sentinel
#**** = 0x2A2A2A2A #wildcard for erase
# one locker record : MAGIC (kL) | LEN (2bytes) | TAG (4) | DATA (LEN)
Locker = Struct("Locker",
    String("magic",2),
    ULInt16("length"),
    Union("tag",
        ULInt32("int"),
        String("tag",4))
    ,
    String("data", lambda ctx: ctx["length"])
)
# lockers are concatenated until the DONE sentinel tag
Lockers = RepeatUntil(lambda obj, ctx: obj.tag.int == DONE, Locker)
def xor_strings(s, key):
    """Return s XORed byte-by-byte with key, repeating key cyclically."""
    return "".join(chr(ord(c) ^ ord(key[i % len(key)])) for i, c in enumerate(s))
def check_effaceable_header(plog):
    """Validate an effaceable storage area : 'Face' magic and CRC32.
    Returns True when the header checks out (asserts on CRC mismatch)."""
    # the 16-byte header is obfuscated by XOR with the following 16 bytes
    z = xor_strings(plog[:16], plog[16:32])
    if z[:4] != "ecaF":
        return False
    plog_generation = struct.unpack("<L", plog[0x38:0x3C])[0]
    print "Effaceable generation" , plog_generation
    # CRC chain covers de-obfuscated header, 0x20-0x3C, and 960 data bytes
    plog_crc = crc32(plog[0x40:0x40 + 960], crc32(plog[0x20:0x3C], crc32(z))) & 0xffffffff
    assert plog_crc == struct.unpack("<L", plog[0x3C:0x40])[0] , "Effaceable CRC"
    print "Effaceable CRC OK"
    return True
class EffaceableLockers(object):
    """Parser for the lockers held in the effaceable storage area.

    Lockers are small tagged blobs (Dkey, EMF!, BAG1, LwVM...) holding
    wrapped filesystem keys; self.lockers maps the 4-char tag to raw data.
    """
    def __init__(self, data):
        self.lockers = {}
        for l in Lockers.parse(data):
            tag = l.tag.int & ~0x80000000  # strip the high flag bit
            tag = struct.pack("<L", tag)[::-1]  # tag bytes are stored reversed
            self.lockers[tag] = l.data
    def display(self):
        print("Lockers : " + ", ".join(sorted(self.lockers.keys())))
    def get(self, tag):
        """Return the raw locker data for tag, or None when absent."""
        return self.lockers.get(tag)
    def get_DKey(self, k835):
        # Dkey locker : class-D key AES-wrapped with key 0x835
        if "Dkey" in self.lockers:
            return AESUnwrap(k835, self.lockers["Dkey"])
    def get_EMF(self, k89b):
        # LwVM locker (newer layout) : EMF key is the last 32 bytes of the
        # decrypted blob; legacy EMF! locker : 4-byte prefix then the key
        if "LwVM" in self.lockers:
            lwvm = AESdecryptCBC(self.lockers["LwVM"], k89b)
            return lwvm[-32:]
        elif "EMF!" in self.lockers:
            return AESdecryptCBC(self.lockers["EMF!"][4:], k89b)

View File

@@ -0,0 +1,265 @@
from crypto.PBKDF2 import PBKDF2
from crypto.aes import AESdecryptCBC
from crypto.aeswrap import AESUnwrap
from crypto.aeswrap import AESwrap
from crypto.curve25519 import curve25519
from hashlib import sha256, sha1
from util.bplist import BPlistReader
from util.tlv import loopTLVBlocks, tlvToDict
import hmac
import struct
# TLV tags found in a keybag header / per-class key blocks
KEYBAG_TAGS = ["VERS", "TYPE", "UUID", "HMCK", "WRAP", "SALT", "ITER"]
CLASSKEY_TAGS = ["CLAS","WRAP","WPKY", "KTYP", "PBKY"] #UUID
KEYBAG_TYPES = ["System", "Backup", "Escrow", "OTA (icloud)"]
SYSTEM_KEYBAG = 0
BACKUP_KEYBAG = 1
ESCROW_KEYBAG = 2
OTA_KEYBAG = 3
#ORed flags in TYPE since iOS 5
FLAG_UIDPLUS = 0x40000000 # UIDPlus hardware key (>= iPad 3)
FLAG_UNKNOWN = 0x80000000
# WRAP bitmask : how a class key is wrapped
WRAP_DEVICE = 1
WRAP_PASSCODE = 2
# class key KTYP values
KEY_TYPES = ["AES", "Curve25519"]
# protection class ids : 1-5 are file classes, 6-11 keychain item classes
PROTECTION_CLASSES={
    1:"NSFileProtectionComplete",
    2:"NSFileProtectionCompleteUnlessOpen",
    3:"NSFileProtectionCompleteUntilFirstUserAuthentication",
    4:"NSFileProtectionNone",
    5:"NSFileProtectionRecovery?",
    6: "kSecAttrAccessibleWhenUnlocked",
    7: "kSecAttrAccessibleAfterFirstUnlock",
    8: "kSecAttrAccessibleAlways",
    9: "kSecAttrAccessibleWhenUnlockedThisDeviceOnly",
    10: "kSecAttrAccessibleAfterFirstUnlockThisDeviceOnly",
    11: "kSecAttrAccessibleAlwaysThisDeviceOnly"
}
"""
device key : key 0x835
"""
class Keybag(object):
    """Parser/unlocker for iOS keybags (system, backup, escrow, OTA).

    A keybag is a TLV blob holding one wrapped key per protection class;
    unlocking derives the passcode key and unwraps each class key, storing
    the clear key under classKeys[clas]["KEY"].
    """
    def __init__(self, data):
        self.type = None
        self.uuid = None
        self.wrap = None
        self.deviceKey = None   # key 0x835 when available
        self.unlocked = False
        self.passcodeComplexity = 0
        self.attrs = {}         # keybag-level TLV attributes (VERS, SALT, ITER...)
        self.classKeys = {}     # class id -> class key dict
        self.KeyBagKeys = None #DATASIGN blob
        self.parseBinaryBlob(data)
    @staticmethod
    def getSystemkbfileWipeID(filename):
        # wipe id of a systembag.kb, used to locate the matching BAG1 locker
        mkb = BPlistReader.plistWithFile(filename)
        return mkb["_MKBWIPEID"]
    @staticmethod
    def createWithPlist(pldict):
        """Build + unlock a keybag from a device info plist produced by the
        ramdisk tools (key835, KeyBagKeys, passcodeKey, DKey)."""
        k835 = pldict.key835.decode("hex")
        data = ""
        if pldict.has_key("KeyBagKeys"):
            data = pldict["KeyBagKeys"].data
        else:
            data = ""
        keybag = Keybag.createWithDataSignBlob(data, k835)
        if pldict.has_key("passcodeKey"):
            if keybag.unlockWithPasscodeKey(pldict["passcodeKey"].decode("hex")):
                print "Keybag unlocked with passcode key"
            else:
                print "FAILed to unlock keybag with passcode key"
        #HAX: inject DKey
        keybag.setDKey(pldict)
        return keybag
    def setDKey(self, device_infos):
        # class 4 (NSFileProtectionNone) key comes from the DKey locker
        self.classKeys[4] = {"CLAS": 4, "KEY": device_infos["DKey"].decode("hex")}
    @staticmethod
    def createWithSystemkbfile(filename, bag1key, deviceKey=None):
        """Decrypt a systembag.kb file with the BAG1 locker key and parse
        the keybag it contains.  Returns None on decryption failure."""
        if filename.startswith("bplist"): #HAX : raw bplist data, not a path
            mkb = BPlistReader.plistWithString(filename)
        else:
            mkb = BPlistReader.plistWithFile(filename)
        try:
            decryptedPlist = AESdecryptCBC(mkb["_MKBPAYLOAD"].data, bag1key, mkb["_MKBIV"].data, padding=True)
        except:
            print "FAIL: AESdecryptCBC _MKBPAYLOAD => wrong BAG1 key ?"
            return None
        if not decryptedPlist.startswith("bplist"):
            print "FAIL: decrypted _MKBPAYLOAD is not bplist"
            return None
        decryptedPlist = BPlistReader.plistWithString(decryptedPlist)
        blob = decryptedPlist["KeyBagKeys"].data
        kb = Keybag.createWithDataSignBlob(blob, deviceKey)
        if decryptedPlist.has_key("OpaqueStuff"):
            # keyboardType hints at the passcode complexity (simple/complex)
            OpaqueStuff = BPlistReader.plistWithString(decryptedPlist["OpaqueStuff"].data)
            kb.passcodeComplexity = OpaqueStuff.get("keyboardType")
        return kb
    @staticmethod
    def createWithDataSignBlob(blob, deviceKey=None):
        """Parse a DATA+SIGN keybag blob and verify its HMAC when present."""
        keybag = tlvToDict(blob)
        kb = Keybag(keybag.get("DATA", ""))
        kb.deviceKey = deviceKey
        kb.KeyBagKeys = blob
        kb.unlockAlwaysAccessible()
        if len(keybag.get("SIGN", "")):
            hmackey = AESUnwrap(deviceKey, kb.attrs["HMCK"])
            #hmac key and data are swapped (on purpose or by mistake ?)
            sigcheck = hmac.new(key=keybag["DATA"], msg=hmackey, digestmod=sha1).digest()
            #fixed in ios 7
            if kb.attrs["VERS"] >= 4:
                sigcheck = hmac.new(key=hmackey, msg=keybag["DATA"], digestmod=sha1).digest()
            if sigcheck != keybag.get("SIGN", ""):
                print "Keybag: SIGN check FAIL"
        return kb
    @staticmethod
    def createWithBackupManifest(manifest, password, deviceKey=None):
        """Build and unlock a keybag from an iTunes backup Manifest.plist.
        Returns None when the backup password is wrong."""
        kb = Keybag(manifest["BackupKeyBag"].data)
        kb.deviceKey = deviceKey
        if not kb.unlockBackupKeybagWithPasscode(password):
            print "Cannot decrypt backup keybag. Wrong password ?"
            return
        return kb
    def isBackupKeybag(self):
        return self.type == BACKUP_KEYBAG
    def parseBinaryBlob(self, data):
        """Parse the keybag TLV : header attributes first, then one block of
        CLASSKEY_TAGS per class key, each block starting with a UUID tag."""
        currentClassKey = None
        for tag, data in loopTLVBlocks(data):
            if len(data) == 4:
                # 4-byte values are big-endian integers
                data = struct.unpack(">L", data)[0]
            if tag == "TYPE":
                self.type = data & 0x3FFFFFFF #ignore the flags
                if self.type > 3:
                    print "FAIL: keybag type > 3 : %d" % self.type
            elif tag == "UUID" and self.uuid is None:
                # the first UUID is the keybag's own
                self.uuid = data
            elif tag == "WRAP" and self.wrap is None:
                self.wrap = data
            elif tag == "UUID":
                # each subsequent UUID starts a new class key block
                if currentClassKey:
                    self.classKeys[currentClassKey["CLAS"]] = currentClassKey
                currentClassKey = {"UUID": data}
            elif tag in CLASSKEY_TAGS:
                currentClassKey[tag] = data
            else:
                self.attrs[tag] = data
        if currentClassKey:
            self.classKeys[currentClassKey["CLAS"]] = currentClassKey
    def getPasscodekeyFromPasscode(self, passcode):
        """Derive the 32-byte passcode key via PBKDF2 over the keybag salt."""
        if self.type == BACKUP_KEYBAG or self.type == OTA_KEYBAG:
            return PBKDF2(passcode, self.attrs["SALT"], iterations=self.attrs["ITER"]).read(32)
        else:
            #Warning, need to run derivation on device with this result
            return PBKDF2(passcode, self.attrs["SALT"], iterations=1).read(32)
    def unlockBackupKeybagWithPasscode(self, passcode):
        if self.type != BACKUP_KEYBAG and self.type != OTA_KEYBAG:
            print "unlockBackupKeybagWithPasscode: not a backup keybag"
            return False
        return self.unlockWithPasscodeKey(self.getPasscodekeyFromPasscode(passcode))
    def unlockAlwaysAccessible(self):
        """Unwrap the class keys that only require the device key."""
        for classkey in self.classKeys.values():
            k = classkey["WPKY"]
            if classkey["WRAP"] == WRAP_DEVICE:
                if not self.deviceKey:
                    continue
                k = AESdecryptCBC(k, self.deviceKey)
                classkey["KEY"] = k
        return True
    def unlockWithPasscodeKey(self, passcodekey):
        """Unwrap every class key with the derived passcode key (and the
        device key where required).  Returns False on a wrong passcode."""
        if self.type != BACKUP_KEYBAG and self.type != OTA_KEYBAG:
            if not self.deviceKey:
                print "ERROR, need device key to unlock keybag"
                return False
        for classkey in self.classKeys.values():
            if not classkey.has_key("WPKY"):
                continue
            k = classkey["WPKY"]
            if classkey["WRAP"] & WRAP_PASSCODE:
                # AES unwrap fails (returns falsy) when the passcode is wrong
                k = AESUnwrap(passcodekey, classkey["WPKY"])
                if not k:
                    return False
            if classkey["WRAP"] & WRAP_DEVICE:
                if not self.deviceKey:
                    continue
                k = AESdecryptCBC(k, self.deviceKey)
            classkey["KEY"] = k
        self.unlocked = True
        return True
    def unwrapCurve25519(self, persistent_class, persistent_key):
        """Unwrap a Curve25519-protected per-file key : ECDH with the class
        private key, sha256 KDF, then AES key unwrap."""
        assert len(persistent_key) == 0x48
        #assert persistent_class == 2 #NSFileProtectionCompleteUnlessOpen
        mysecret = self.classKeys[persistent_class]["KEY"]
        mypublic = self.classKeys[persistent_class]["PBKY"]
        hispublic = persistent_key[:32]
        shared = curve25519(mysecret, hispublic)
        md = sha256('\x00\x00\x00\x01' + shared + hispublic + mypublic).digest()
        return AESUnwrap(md, persistent_key[32:])
    def unwrapKeyForClass(self, clas, persistent_key, printError=True):
        """Unwrap a wrapped per-file/item key with the class key.
        Returns "" when the class key is missing/locked."""
        if not self.classKeys.has_key(clas) or not self.classKeys[clas].has_key("KEY"):
            if printError: print "Keybag key %d missing or locked" % clas
            return ""
        ck = self.classKeys[clas]["KEY"]
        #if self.attrs.get("VERS", 2) >= 3 and clas == 2:
        if self.attrs.get("VERS", 2) >= 3 and self.classKeys[clas].get("KTYP", 0) == 1:
            return self.unwrapCurve25519(clas, persistent_key)
        if len(persistent_key) == 0x28:
            return AESUnwrap(ck, persistent_key)
        return
    def wrapKeyForClass(self, clas, persistent_key):
        """Wrap a key with the class key (inverse of unwrapKeyForClass)."""
        if not self.classKeys.has_key(clas) or not self.classKeys[clas].has_key("KEY"):
            print "Keybag key %d missing or locked" % clas
            return ""
        ck = self.classKeys[clas]["KEY"]
        return AESwrap(ck, persistent_key)
    def printClassKeys(self):
        """Print a table of all class keys (wrap mode, type, key, pubkey)."""
        print "Keybag type : %s keybag (%d)" % (KEYBAG_TYPES[self.type], self.type)
        print "Keybag version : %d" % self.attrs["VERS"]
        print "Keybag UUID : %s" % self.uuid.encode("hex")
        print "-"*128
        print "".join(["Class".ljust(53),
                       "WRAP".ljust(5),
                       "Type".ljust(11),
                       "Key".ljust(65),
                       "Public key"])
        print "-"*128
        for k, ck in self.classKeys.items():
            if k == 6: print ""
            print "".join([PROTECTION_CLASSES.get(k).ljust(53),
                           str(ck.get("WRAP","")).ljust(5),
                           KEY_TYPES[ck.get("KTYP",0)].ljust(11),
                           ck.get("KEY", "").encode("hex").ljust(65),
                           ck.get("PBKY", "").encode("hex")])
        print ""
    def getClearClassKeysDict(self):
        """Return {class id (str): hex key} for an unlocked keybag."""
        if self.unlocked:
            d = {}
            for ck in self.classKeys.values():
                d["%d" % ck["CLAS"]] = ck.get("KEY","").encode("hex")
            return d

View File

@@ -0,0 +1,381 @@
from crypto.aes import AESdecryptCBC, AESencryptCBC
from hfs.emf import cprotect_xattr, EMFVolume
from hfs.hfs import HFSVolume, hfs_date, HFSFile
from hfs.journal import carveBtreeNode, isDecryptedCorrectly
from hfs.structs import *
from util import sizeof_fmt, makedirs, hexdump
import hashlib
import os
import struct
class NANDCarver(object):
    def __init__(self, volume, image, outputdir=None):
        """Carver for deleted files/keys in a NAND image.

        volume : HFS/EMF volume object, image : NAND image object.
        outputdir defaults to <image dir>/undelete (or ./undelete for
        remote images)."""
        self.volume = volume
        self.image = image
        self.nand = image
        self.ftlhax = False     # set externally : lbn -> FTL pages map used by carveFile
        self.userblocks = None
        self.lpnToVpn = None
        self.files = {}         # sha1(catalog key) -> (name, record, usn)
        self.keys = {}          # fileID -> [candidate file keys]
        self.encrypted = image.encrypted and hasattr(volume, "emfkey")
        # NOTE(review): immediately overwrites the line above -- confirm which
        # condition is the intended one
        self.encrypted = hasattr(volume, "cp_root") and volume.cp_root != None
        if outputdir == None:
            if image.filename != "remote": outputdir = os.path.join(os.path.dirname(image.filename), "undelete")
            else: outputdir = os.path.join(".", "undelete")
        print "Carver output %s" % outputdir
        self.outputdir = outputdir
        self.okfiles = 0
        self.first_lba = self.volume.bdev.lbaoffset
        self.pageSize = image.pageSize
        self.blankPage = "\xDE\xAD\xBE\xEF" * (self.pageSize/4)
        self.emfkey = None
        self.fileIds = None     # set externally : ids of live files, used to spot deleted records
        self.fastMode = False
        if hasattr(volume, "emfkey"):
            self.emfkey = volume.emfkey
    def carveFile(self, hfsfile, callback, lbas=None, filter_=None):
        """Feed every NAND page that ever stored a block of hfsfile to
        callback(pagedata, usn, filter_).

        lbas : optional collection of logical blocks to restrict the scan."""
        for e in hfsfile.extents:
            if e.blockCount == 0:
                break
            for i in xrange(e.startBlock, e.startBlock+e.blockCount):
                if lbas and not i in lbas:
                    continue
                if self.fastMode:
                    # fast mode : ftlhax maps lbn -> [vpn], usn unknown (0)
                    for vpn in self.ftlhax.get(self.first_lba+i, []):
                        usn = 0
                        s,d = self.nand.ftl.YAFTL_readPage(vpn, self.emfkey, self.first_lba+i)
                        callback(d, usn, filter_)
                else:
#                    s,d = self.nand.ftl.YAFTL_readPage(vpn, self.emfkey, self.first_lba+i)
#                    callback(d, 0)
                    # full mode : ftlhax maps lbn -> {usn: [vpn]}; skip the
                    # highest usn (the current, non-deleted version)
                    usnsForLbn = self.ftlhax.get(self.first_lba+i, [])
                    for usn in sorted(usnsForLbn.keys())[:-1]:
                        for vpn in usnsForLbn[usn]:
                            s,d = self.nand.ftl.YAFTL_readPage(vpn, self.emfkey, self.first_lba+i)
                            callback(d, usn, filter_)
    def _catalogFileCallback(self, data, usn, filter_):
        """carveFile callback : collect deleted file records from a carved
        catalog B-tree node page into self.files."""
        for k,v in carveBtreeNode(data,HFSPlusCatalogKey, HFSPlusCatalogData):
            if v.recordType != kHFSPlusFileRecord:
                continue
            if filter_ and not filter_(k,v):
                continue
            name = getString(k)
            #if not self.filterFileName(name):
            #    continue
            # deduplicate on the raw catalog key bytes
            h = hashlib.sha1(HFSPlusCatalogKey.build(k)).digest()
            if self.files.has_key(h):
                continue
            # a record whose fileID is still allocated is not deleted
            if not self.fileIds.has_key(v.data.fileID):
                print "Found deleted file record", v.data.fileID, name.encode("utf-8"), "created", hfs_date(v.data.createDate)
            self.files[h] = (name,v, usn)
def _attributesFileCallback(self, data, usn, filter_):
    """carveFile callback for attributes B-tree pages: collect file protection keys.

    Looks for com.apple.system.cprotect extended attributes belonging to
    file IDs that are no longer live, unwraps each persistent key with the
    volume keybag, and accumulates the candidate keys per file ID in
    self.keys (a file may have had several keys over its lifetime).
    """
    for k,v in carveBtreeNode(data,HFSPlusAttrKey, HFSPlusAttrData):
        if getString(k) != "com.apple.system.cprotect":
            continue
        # skip files that still exist; we only need keys for deleted ones
        if self.fileIds.has_key(k.fileID):
            continue
        filekeys = self.keys.setdefault(k.fileID, [])
        try:
            cprotect = cprotect_xattr.parse(v.data)
        except:
            # carved page residue often fails to parse; best-effort, keep going
            continue
        if cprotect.key_size == 0:
            continue
        filekey = self.volume.keybag.unwrapKeyForClass(cprotect.persistent_class, cprotect.persistent_key, False)
        if filekey and not filekey in filekeys:
            #print "Found key for file ID ", k.fileID
            filekeys.append(filekey)
def carveCatalog(self, lbas=None, filter_=None):
    # Scan every on-NAND version of the catalog B-tree for deleted file records.
    target = self.volume.catalogFile
    return self.carveFile(target, self._catalogFileCallback, lbas, filter_)
def carveKeys(self, lbas=None):
    # Scan every on-NAND version of the attributes B-tree for cprotect file keys.
    target = self.volume.xattrFile
    return self.carveFile(target, self._attributesFileCallback, lbas)
def pagesForLBN(self, lbn):
    """Return the known on-NAND versions of a partition-relative block number.

    The block number is translated to an absolute LPN by adding the data
    partition offset, then looked up in the FTL scan results; an empty dict
    is returned when nothing is known for that page.
    """
    absolute_lpn = self.first_lba + lbn
    return self.ftlhax.get(absolute_lpn, {})
def decryptFileBlock(self, pn, filekey, lbn, decrypt_offset):
    """Read physical page pn through YAFTL and decrypt it as file content.

    pn             : virtual page number(s); caller passes the page to read
    filekey        : per-file AES key (cprotect) used for the content layer
    lbn            : logical block number, used to derive the pre-iOS5 IV
    decrypt_offset : byte offset of this page within the file (iOS 5 IV input)

    Pre-iOS5: whole page is one AES-CBC unit with an LBA-derived IV.
    iOS 5: each 0x1000 chunk uses IV = AES-enc(ivForLBA(file_offset), SHA1(filekey)[:16]).
    NOTE(review): near-duplicate of decryptFileBlock2, but the pre-iOS5 branch
    here calls ivForLBA(lbn) while decryptFileBlock2 uses add=False — confirm
    before unifying the two.
    """
    s, ciphertext = self.nand.ftl.YAFTL_readPage(pn, None, lbn)
    if not self.encrypted:
        return ciphertext
    if not self.image.isIOS5():
        return AESdecryptCBC(ciphertext, filekey, self.volume.ivForLBA(lbn))
    clear = ""
    # iOS 5 content protection: per-4K IV derived from the file offset
    ivkey = hashlib.sha1(filekey).digest()[:16]
    for i in xrange(len(ciphertext)/0x1000):
        iv = self.volume.ivForLBA(decrypt_offset, False)
        iv = AESencryptCBC(iv, ivkey)
        clear += AESdecryptCBC(ciphertext[i*0x1000:(i+1)*0x1000], filekey, iv)
        decrypt_offset += 0x1000
    return clear
def writeUndeletedFile(self, filename, data):
    """Write recovered file content under the carver output directory.

    Known file types are sorted into a per-extension subfolder; characters
    that are invalid on Windows are stripped from the filename. Fix: the
    output file is now opened with a context manager so the handle is closed
    (and data flushed) deterministically instead of relying on GC.
    """
    knownExtensions = (".m4a", ".plist",".sqlite",".sqlitedb", ".jpeg", ".jpg", ".png", ".db",".json",".xml",".sql")
    #windows invalid chars \/:*?"<>|
    filename = str(filename.encode("utf-8")).translate(None, "\\/:*?\"<>|,")
    folder = self.outputdir
    if self.outputdir == "./":
        folder = folder + "/undelete"
    elif filename.lower().endswith(knownExtensions):
        # route e.g. foo.sqlitedb into <outputdir>/sqlitedb/
        ext = filename[filename.rfind(".")+1:]
        folder = folder + "/" + ext.lower()
    makedirs(folder)
    with open(folder + "/" + filename, "wb") as f:
        f.write(data)
def getFileAtUSN(self, filename, filerecord, filekey, usn, previousVersion=None, exactSize=True):
    """Reassemble one version (identified by usn) of a deleted file and write it out.

    filerecord      : carved HFSPlusCatalogFile record (dataFork + fileID)
    filekey         : AES key used to decrypt each content page
    usn             : update sequence number selecting which page version to take
    previousVersion : page list from a newer version; pages missing at this USN
                      are filled from it (used by rollbackExistingFile)
    exactSize       : truncate the output to the fork's logicalSize

    Returns the list of page buffers so the caller can pass it back as
    previousVersion for the next (older) USN.
    """
    missing_pages = 0
    decrypt_offset = 0
    file_pages = []
    logicalSize = filerecord.dataFork.logicalSize
    for extent in self.volume.getAllExtents(filerecord.dataFork, filerecord.fileID):
        for bn in xrange(extent.startBlock, extent.startBlock + extent.blockCount):
            pn = self.pagesForLBN(bn).get(usn) #fail
            if pn:
                # several VPNs can share a USN; take the last one recorded
                clear = self.decryptFileBlock(pn[-1], filekey, bn, decrypt_offset)
                file_pages.append(clear)
            elif previousVersion:
                # no copy at this USN: reuse the page from the newer version
                file_pages.append(previousVersion[len(file_pages)])
            else:
                # page lost: pad with the DEADBEEF marker and count it
                file_pages.append(self.blankPage)
                missing_pages += 1
            decrypt_offset += self.pageSize
    print "Recovered %d:%s %d missing pages, size %s, created %s, contentModDate %s" % \
        (filerecord.fileID, filename.encode("utf-8"), missing_pages, sizeof_fmt(logicalSize), hfs_date(filerecord.createDate), hfs_date(filerecord.contentModDate))
    filename = "%d_%d_%s" % (filerecord.fileID, usn, filename)
    if missing_pages == 0:
        # fully recovered versions are flagged in the filename and counted
        filename = "OK_" + filename
        self.okfiles += 1
    data = "".join(file_pages)
    if exactSize:
        data = data[:logicalSize]
    self.writeUndeletedFile(filename, data)
    return file_pages
#test for SQLite
def rollbackExistingFile(self, filename):
filerecord = self.volume.getFileRecordForPath(filename)
filekey = self.volume.getFileKeyForFileId(filerecord.fileID)
print "filekey", filekey.encode("hex")
z = None
for extent in filerecord.dataFork.HFSPlusExtentDescriptor:
for bn in xrange(extent.startBlock, extent.startBlock + extent.blockCount):
pages = self.pagesForLBN(bn)
print pages
for usn in sorted(pages.keys()):
d = self.decryptFileBlock(pages[usn][-1], filekey, bn, 0)
if d.startswith("SQL") or True:
filechangecounter = struct.unpack(">L", d[24:28])
print usn, "OK", filechangecounter
z = self.getFileAtUSN(os.path.basename(filename), filerecord, filekey, usn, z)
else:
print usn, "FAIL"
return
def filterFileName(self, filename):
    """Keep only JPEG images (case-insensitive extension check)."""
    lowered = filename.lower()
    return lowered.endswith(".jpg")
def getExistingFileIDs(self):
    """Populate self.fileIds with the CNIDs of all files still live on the volume.

    The carvers use this index to tell deleted records apart from live ones.
    Fix: len(dict) instead of len(dict.keys()), which built a throwaway list.
    """
    print("Collecting existing file ids")
    self.fileIds = self.volume.listAllFileIds()
    print("%d file IDs" % len(self.fileIds))
def carveDeletedFiles_fast(self, catalogLBAs=None, filter_=None):
    """Fast-path undelete: BTOC-based lookup instead of a full USN scan.

    catalogLBAs : optional LBA whitelist to restrict the catalog scan
    filter_     : optional predicate(key, record) applied to catalog records

    Uses YAFTL_lookup1 to build self.ftlhax (lpn -> vpn versions) and
    self.userblocks (usn -> user block), carves the catalog for deleted
    records and the attributes tree for their keys, then tries to rebuild
    each file through readFileHax.
    """
    self.fastMode = True
    if not self.ftlhax:
        hax, userblocks = self.nand.ftl.YAFTL_lookup1()
        self.ftlhax = hax
        self.userblocks = userblocks
    self.files = {}
    if not self.fileIds:
        self.getExistingFileIDs()
    print "Carving catalog file"
    #catalogLBAs = None
    self.carveCatalog(catalogLBAs, filter_)
    # collect attribute-tree LBAs near each deleted CNID (currently unused below)
    keysLbas = []
    for name, vv, usn in self.files.values():
        for i in xrange(vv.data.fileID, vv.data.fileID + 100):
            if self.volume.xattrTree.search((i, "com.apple.system.cprotect")):
                keysLbas.extend(self.volume.xattrTree.getLBAsHax())
                break
    #print "keysLbas", keysLbas
    if self.encrypted and len(self.keys) == 0:
        print "Carving attribute file for file keys"
        #self.carveKeys(keysLbas)
        self.carveKeys()
    self.okfiles = 0
    total = 0
    print "%d files, %d keys" % (len(self.files), len(self.keys))
    for name, vv, usn in self.files.values():
        if not self.keys.has_key(vv.data.fileID):
            print "No file key for %s" % name.encode("utf-8")
        # fall back to the EMF volume key when no per-file key was carved
        keys = set(self.keys.get(vv.data.fileID, [self.emfkey]))
        print "%s" % (name.encode("utf-8"))
        if self.readFileHax(name, vv.data, keys):
            total += 1
    print "Carving done, recovered %d deleted files, %d are most likely OK" % (total, self.okfiles)
def carveDeleteFiles_slow(self, catalogLBAs=None, filter_=None):
    """Slow-path undelete: full USN history scan via YAFTL_hax2.

    For each deleted file, tries every candidate key against every known
    version of the file's first block until one decrypts correctly, then
    rebuilds the whole file at that USN with getFileAtUSN.
    """
    self.fastMode = False
    self.files = {}
    if not self.ftlhax:
        self.ftlhax = self.nand.ftl.YAFTL_hax2()
    if not self.fileIds:
        self.getExistingFileIDs()
    if self.encrypted and len(self.keys) == 0:
        print "Carving attribute file for file keys"
        self.carveKeys()
    print "Carving catalog file"
    self.carveCatalog(catalogLBAs, filter_)
    self.okfiles = 0
    total = 0
    print "%d files" % len(self.files)
    for name, vv, usn in self.files.values():
        # candidate keys: carved cprotect keys, else the EMF volume key
        keys = set(self.keys.get(vv.data.fileID, [self.emfkey]))
        print "%s num keys = %d" % (name, len(keys))
        good_usn = 0
        for filekey in keys:
            if good_usn:
                break
            first_block = vv.data.dataFork.HFSPlusExtentDescriptor[0].startBlock
            for usn, pn in self.pagesForLBN(first_block).items():
                # key is validated on the first block only (magic-byte check)
                d = self.decryptFileBlock(pn[-1], filekey, self.first_lba+first_block, 0)
                if isDecryptedCorrectly(d):
                    #print "USN for first block : ", usn
                    good_usn = usn
                    break
        if good_usn == 0:
            continue
        self.getFileAtUSN(name, vv.data, filekey, good_usn)
def getBBTOC(self, block):
    """Return the {lpn: vpn} mapping for one virtual block.

    Reads the block's table of contents (BTOC) pages; entry i of the BTOC
    gives the LPN stored in page i of the block. Falls back to the FTL's
    per-page scan when the block has no readable BTOC.
    Fix: replaced the index loop (for i in xrange(len(btoc))) with the
    enumerate idiom; later duplicate LPNs still win, as before.
    """
    ftl = self.nand.ftl
    btoc = ftl.readBTOCPages(block, ftl.totalPages)
    if not btoc:
        return ftl.block_lpn_to_vpn(block)
    base = block * ftl.vfl.pages_per_sublk
    return dict((lpn, base + i) for i, lpn in enumerate(btoc))
def readFileHax(self, filename, filerecord, filekeys):
    """Fast-path rebuild of one deleted file (used by carveDeletedFiles_fast).

    filename   : display/output name of the file
    filerecord : carved HFSPlusCatalogFile record
    filekeys   : set of candidate AES keys for the content

    Phase 1: try each (key, version-of-first-page) pair until one decrypts
    correctly; that fixes the file key, the starting USN and the virtual
    block holding the first page.
    Phase 2: walk that block plus the next few user blocks (by USN) through
    their BTOCs, collecting any page belonging to the file's LBN list.
    Returns True when a version was written out, False when no key matched.
    """
    lba0 = self.first_lba + filerecord.dataFork.HFSPlusExtentDescriptor[0].startBlock
    filekey = None
    good_usn = None
    first_vpn = 0
    first_usn = 0
    hax = self.ftlhax
    print "%d versions for first lba" % len(hax.get(lba0, []))
    for k in filekeys:
        # NOTE(review): break only exits the inner loop; once filekey is set the
        # remaining candidate keys are still scanned — confirm this is intended
        for vpn in hax.get(lba0, []):
            s, ciphertext = self.nand.ftl.YAFTL_readPage(vpn, key=None, lpn=None)
            if not ciphertext:
                continue
            d = self.decryptFileBlock2(ciphertext, k, lba0, 0)
            #hexdump(d[:16])
            if isDecryptedCorrectly(d):
                filekey = k
                first_vpn = vpn
                first_usn = good_usn = s.usn
                # virtual block containing the first page; scanned first below
                block = vpn / self.nand.ftl.vfl.pages_per_sublk
                break
    if not filekey:
        return False
    logicalSize = filerecord.dataFork.logicalSize
    missing_pages = 0
    file_pages = []
    # absolute LPNs of the file's pages, in file order
    lbns = []
    for extent in self.volume.getAllExtents(filerecord.dataFork, filerecord.fileID):
        for bn in xrange(extent.startBlock, extent.startBlock + extent.blockCount):
            lbns.append(self.first_lba + bn)
    datas = {}
    # only the 5 user blocks written at/after the file's USN are inspected
    usnblocksToLookAT = sorted(filter(lambda x: x >= good_usn, self.userblocks.keys()))[:5]
    print usnblocksToLookAT
    # sentinel 0 triggers the first_block branch on the first iteration
    usnblocksToLookAT.insert(0, 0)
    first_block = True
    done = False
    for usn in usnblocksToLookAT:
        if first_block:
            bbtoc = self.getBBTOC(block)
            first_block = False
        else:
            bbtoc = self.getBBTOC(self.userblocks[usn])
        for lbn in bbtoc.keys():
            if not lbn in lbns:
                continue
            idx = lbns.index(lbn)
            s, ciphertext = self.nand.ftl.YAFTL_readPage(bbtoc[lbn], key=None, lpn=None)
            if not ciphertext:
                continue
            ciphertext = self.decryptFileBlock2(ciphertext, filekey, lbn, idx*self.pageSize)
            if idx == 0:
                # re-validate only the first page; later pages have no magic
                if not isDecryptedCorrectly(ciphertext):
                    continue
            datas[idx*self.pageSize] = (ciphertext, lbn - self.first_lba)
            #if idx == len(lbns):
            if len(datas) == len(lbns):
                done=True
                break
        if done:
            break
    cleartext = ""
    decrypt_offset = 0
    for i in xrange(0,logicalSize, self.pageSize):
        if datas.has_key(i):
            ciphertext, lbn = datas[i]
            cleartext += ciphertext
        else:
            # page never found: pad with the DEADBEEF marker
            cleartext += self.blankPage
            missing_pages += 1
        decrypt_offset += self.pageSize
    print "Recovered %d:%s %d missing pages, size %s, created %s, contentModDate %s" % \
        (filerecord.fileID, filename.encode("utf-8"), missing_pages, sizeof_fmt(logicalSize), hfs_date(filerecord.createDate), hfs_date(filerecord.contentModDate))
    filename = "%d_%d_%s" % (filerecord.fileID, first_usn, filename)
    if missing_pages == 0:
        filename = "OK_" + filename
        self.okfiles += 1
    if True:#exactSize:
        cleartext = cleartext[:logicalSize]
    self.writeUndeletedFile(filename, cleartext)
    return True
def decryptFileBlock2(self, ciphertext, filekey, lbn, decrypt_offset):
    """Decrypt an already-read page as file content (no NAND access).

    Same IV scheme as decryptFileBlock, but takes the raw ciphertext
    directly. NOTE(review): the pre-iOS5 branch here passes add=False to
    ivForLBA while decryptFileBlock uses the default — confirm which is
    correct before deduplicating.
    """
    if not self.encrypted:
        return ciphertext
    if not self.image.isIOS5():
        return AESdecryptCBC(ciphertext, filekey, self.volume.ivForLBA(lbn, add=False))
    clear = ""
    # iOS 5 content protection: per-4K IV derived from the file offset
    ivkey = hashlib.sha1(filekey).digest()[:16]
    for i in xrange(len(ciphertext)/0x1000):
        iv = self.volume.ivForLBA(decrypt_offset, False)
        iv = AESencryptCBC(iv, ivkey)
        clear += AESdecryptCBC(ciphertext[i*0x1000:(i+1)*0x1000], filekey, iv)
        decrypt_offset += 0x1000
    return clear
def getFileRanges(self, hfsfile):
    """Return one lazy block-number range per allocated extent of hfsfile."""
    ranges = []
    for extent in hfsfile.extents:
        # extent records are zero-padded; the first empty one ends the list
        if extent.blockCount == 0:
            break
        ranges.append(xrange(extent.startBlock, extent.startBlock + extent.blockCount))
    return ranges
def readFSPage(self, vpn, lba):
    """Read one filesystem page through YAFTL, decrypting with the EMF key.

    Returns the page data when the spare metadata was read successfully,
    otherwise falls through and returns None.
    """
    spare, data = self.nand.ftl.YAFTL_readPage(vpn, self.emfkey, self.first_lba + lba)
    if spare:
        return data

View File

@@ -0,0 +1,86 @@
import os
import struct
import sys
"""
row-by-row dump
page = data + spare metadata + iokit return code + iokit return code 2
"""
class NANDImageFlat(object):
    """Reader for a flat, row-by-row NAND dump.

    Pages are interleaved across CEs: dump entry i holds CE (i % nCEs),
    page (i // nCEs). Each dumped entry is
    data + spare metadata [+ 2 IOKit return codes when present].
    Fix: added close() — the file descriptor was previously never released.
    """
    def __init__(self, filename, geometry):
        flags = os.O_RDONLY
        if sys.platform == "win32":
            flags |= os.O_BINARY   # avoid CRLF translation on Windows
        self.fd = os.open(filename, flags)
        self.nCEs = geometry["#ce"]
        self.pageSize = geometry["#page-bytes"]
        self.metaSize = geometry.get("meta-per-logical-page", 12)
        self.dumpedPageSize = geometry.get("dumpedPageSize", self.pageSize + self.metaSize + 8)
        # when the dumped entry has no room for the 8 status bytes, the dump
        # carries no IOKit return codes
        self.hasIOKitStatus = True
        if self.dumpedPageSize == self.pageSize + geometry["#spare-bytes"] or self.dumpedPageSize == self.pageSize + self.metaSize:
            self.hasIOKitStatus = False
        self.blankPage = "\xFF" * self.pageSize
        self.blankSpare = "\xFF" * self.metaSize
        self.imageSize = os.path.getsize(filename)
        expectedSize = geometry["#ce"] * geometry["#ce-blocks"] * geometry["#block-pages"] * self.dumpedPageSize
        if self.imageSize < expectedSize:
            raise Exception("Error: image appears to be truncated, expected size=%d" % expectedSize)
        print("Image size matches expected size, looks ok")
    def close(self):
        """Release the dump file descriptor (was leaked before)."""
        if self.fd is not None:
            os.close(self.fd)
            self.fd = None
    def _readPage(self, ce, page):
        # dump entries are CE-interleaved: entry index = page * nCEs + ce
        i = page * self.nCEs + ce
        off = i * self.dumpedPageSize
        os.lseek(self.fd, off, os.SEEK_SET)
        return os.read(self.fd, self.dumpedPageSize)
    def readPage(self, ce, page):
        """Return (spare, data) for one page, or (None, None) on a bad/blank page."""
        d = self._readPage(ce, page)
        if not d or len(d) != self.dumpedPageSize:
            return None,None
        if self.hasIOKitStatus:
            # nonzero first return code means the original read failed
            r1,r2 = struct.unpack("<LL", d[self.pageSize+self.metaSize:self.pageSize+self.metaSize+8])
            if r1 != 0x0:
                return None, None
        data = d[:self.pageSize]
        spare = d[self.pageSize:self.pageSize+self.metaSize]
        # without status codes, an all-FF page+spare is the only blank marker
        if not self.hasIOKitStatus and data == self.blankPage and spare == self.blankSpare:
            return None,None
        return spare, data
"""
iEmu NAND format
one file for each CE, start with chip id (8 bytes) then pages
page = non-empty flag (1 byte) + data + spare metadata (12 bytes)
"""
class NANDImageSplitCEs(object):
    """Reader for the iEmu NAND format: one ce_<n>.bin file per CE.

    Each file starts with an 8-byte chip id, then dumped pages of
    1 byte non-empty flag + page data + 12 bytes spare metadata.
    Fixes: npages now excludes the 8-byte header from the page count
    (it was counted as payload before), and close() releases the
    previously-leaked file descriptors.
    """
    def __init__(self, folder, geometry):
        flags = os.O_RDONLY
        if sys.platform == "win32":
            flags |= os.O_BINARY   # avoid CRLF translation on Windows
        self.fds = []
        self.nCEs = geometry["#ce"]
        self.pageSize = geometry["#page-bytes"]
        self.metaSize = 12
        self.npages = 0
        self.dumpedPageSize = 1 + self.pageSize + self.metaSize
        for i in range(self.nCEs):
            fd = os.open(folder + "/ce_%d.bin" % i, flags)
            self.fds.append(fd)
            # fix: subtract the 8-byte chip id header before counting pages;
            # floor division keeps the count an int
            self.npages += (os.fstat(fd).st_size - 8) // self.dumpedPageSize
    def close(self):
        """Release all per-CE file descriptors (were leaked before)."""
        for fd in self.fds:
            os.close(fd)
        self.fds = []
    def _readPage(self, ce, page):
        fd = self.fds[ce]
        off = 8 + page * self.dumpedPageSize #skip chip id
        os.lseek(fd, off, os.SEEK_SET)
        return os.read(fd, self.dumpedPageSize)
    def readPage(self, ce, page):
        """Return (spare, data) for one page, or (None, None) when empty/short."""
        d = self._readPage(ce, page)
        if not d or len(d) != self.dumpedPageSize:
            return None,None
        # leading flag byte marks a programmed page
        # NOTE(review): byte-vs-str comparison — Python 2 semantics assumed
        if d[0] != '1' and d[0] != '\x01':
            return None,None
        data = d[1:1+self.pageSize]
        spare = d[1+self.pageSize:1+self.pageSize+self.metaSize]
        return spare, data

View File

@@ -0,0 +1,220 @@
from carver import NANDCarver
from construct.core import Struct
from construct.macros import ULInt32, ULInt16, Array, ULInt8, Padding
from pprint import pprint
from structs import SpareData
from util import hexdump
from vfl import VFL
import plistlib
"""
openiboot/plat-s5l8900/ftl.c
openiboot/plat-s5l8900/includes/s5l8900/ftl.h
"""
# Log-block descriptor inside the FTL context: maps a logical block (wLbn)
# to the virtual block (wVbn) holding its updated pages.
FTLCxtLog = Struct("FTLCxtLog",
    ULInt32("usn"),
    ULInt16("wVbn"),
    ULInt16("wLbn"),
    ULInt32("wPageOffsets"),
    ULInt16("pagesUsed"),
    ULInt16("pagesCurrent"),
    ULInt32("isSequential")
)
FTLCxtElement2 = Struct("FTLCxtElement2",
    ULInt16("field_0"),
    ULInt16("field_2")
)
# On-NAND FTL context ("FTL Cxt") as defined by openiboot's s5l8900 FTL headers;
# field names follow the openiboot reverse engineering.
FTLCxt = Struct("FTLCxt",
    ULInt32("usnDec"),
    ULInt32("nextblockusn"),
    ULInt16("wNumOfFreeVb"),
    ULInt16("nextFreeIdx"),
    ULInt16("swapCounter"),
    Array(20, ULInt16("awFreeVb")),
    ULInt16("field_36"),
    Array(18, ULInt32("pages_for_pawMapTable")),
    Array(36, ULInt32("pages_for_pawEraseCounterTable")),
    Array(34, ULInt32("pages_for_wPageOffsets")),
    ULInt32("pawMapTable"),
    ULInt32("pawEraseCounterTable"),
    ULInt32("wPageOffsets"),
    Array(18, FTLCxtLog),
    ULInt32("eraseCounterPagesDirty"),
    ULInt16("unk3"),
    Array(3, ULInt16("FTLCtrlBlock")),
    ULInt32("FTLCtrlPage"),
    ULInt32("clean"),
    Array(36, ULInt32("pages_for_pawReadCounterTable")),
    ULInt32("pawReadCounterTable"),
    Array(5, FTLCxtElement2),
    ULInt32("field_3C8"),
    ULInt32("totalReadCount"),
    ULInt32("page_for_FTLCountsTable"),
    ULInt32("hasFTLCountsTable"),
    Padding(0x420), #, ULInt8("field_3D8")),
    # versionLower == 0x46560001 marks a valid/known context version
    ULInt32("versionLower"),
    ULInt32("versionUpper")
)
# Spare-area page type tags used by the legacy FTL
FTL_CTX_TYPE = 0x43
FTL_BLOCK_MAP = 0x44
FTL_ERASE_COUNTER = 0x46
FTL_MOUNTED = 0x47
FTL_CTX_TYPE_MAX = 0x4F
USER_TYPE = 0x40
USER_LAST_TYPE = 0x41 #last user page in superblock?
class FTL(object):
    """Legacy (pre-VSVFL) flash translation layer reader, after openiboot's
    plat-s5l8900 FTL. Translates logical page numbers to virtual pages using
    the on-NAND FTL context when it was cleanly unmounted, or rebuilds the
    block map by scanning when it was not.
    """
    def __init__(self, nand, vfl):
        self.nand = nand
        self.vfl = vfl
        self.pawMapTable = {} #maps logical blocks to virtual blocks
        self.pLogs = {}
        # clean context first; fall back to a full scan on unclean shutdown
        if not self.FTL_open():
            self.FTL_restore()

    def FTL_open(self):
        """Locate and parse the most recent clean FTL context.

        Returns True on success (self.ctx set), False on unclean shutdown.
        NOTE(review): if the page loop finds no FTL_CTX_TYPE page at all,
        ctx is unbound at `self.ctx = ctx` and this raises NameError —
        confirm whether that path can occur in practice.
        """
        minUsnDec = 0xffffffff
        ftlCtrlBlock = 0xffff
        # the control block with the smallest usnDec holds the current context
        for vb in self.vfl.VFL_get_FTLCtrlBlock():
            s, d = self.vfl.read_single_page(vb * self.vfl.pages_per_sublk)
            if not s:
                continue
            if s.type >= FTL_CTX_TYPE and s.type <= FTL_CTX_TYPE_MAX:
                if s.usn < minUsnDec:
                    ftlCtrlBlock = vb
                    minUsnDec = s.usn
        print ftlCtrlBlock
        self.ftlCtrlBlock = ftlCtrlBlock
        # scan the control block backwards for the last written context page
        for p in xrange(self.vfl.pages_per_sublk-1,1, -1):
            s, d = self.vfl.read_single_page(ftlCtrlBlock * self.vfl.pages_per_sublk + p)
            if not s:
                continue
            #print s
            #print p
            if s.type == FTL_CTX_TYPE:
                print s.usn
                ctx = FTLCxt.parse(d)
                if ctx.versionLower == 0x46560001:
                    print ctx
                # context must point back at its own page
                assert ctx.FTLCtrlPage == (ftlCtrlBlock * self.vfl.pages_per_sublk + p)
                break
            else:
                # last written page is not a context: device was not unmounted cleanly
                print "Unclean shutdown, last type 0x%x" % s.type
                return False
        self.ctx = ctx
        print "FTL_open OK !"
        return True

    def determine_block_type(self, block):
        """Classify a virtual block: returns (isSequential, maxUSN).

        A block is sequential when every readable page sits at the offset
        matching its LPN. NOTE(review): reconstructed control flow returns
        as soon as one out-of-place page is seen — confirm against upstream.
        """
        maxUSN = 0
        isSequential = True
        for page in xrange(self.vfl.pages_per_sublk-1,1, -1):
            s, _ = self.vfl.read_single_page(block * self.vfl.pages_per_sublk + page)
            if not s:
                continue
            if s.usn > maxUSN:
                maxUSN = s.usn
            if s.lpn % self.vfl.pages_per_sublk != page:
                isSequential = False
                return isSequential, maxUSN
        return isSequential, maxUSN

    def FTL_restore(self):
        """Rebuild pawMapTable/pLogs by scanning all superblocks (unclean shutdown).

        Results are cached next to the image file so a rescan is avoided on
        the next run. NOTE(review): the bare `raise` below has no active
        exception and the len(v)==0 case would IndexError — both look like
        latent bugs; left as-is, flagged for upstream review.
        """
        self.pLogs = self.vfl.nand.loadCachedData("pLogs")
        self.pawMapTable = self.vfl.nand.loadCachedData("pawMapTable")
        if self.pLogs and self.pawMapTable:
            print "Found cached FTL restore information"
            return
        self.pawMapTable = {}
        self.pLogs = {}
        ctx = None
        # even on unclean shutdown an (older) context page is still needed
        for p in xrange(self.vfl.pages_per_sublk-1,1, -1):
            s, d = self.vfl.read_single_page(self.ftlCtrlBlock * self.vfl.pages_per_sublk + p)
            if not s:
                continue
            if s.type == FTL_CTX_TYPE:
                print s.usn
                ctx = FTLCxt.parse(d)
                if ctx.versionLower == 0x46560001:
                    print ctx
                assert ctx.FTLCtrlPage == (self.ftlCtrlBlock * self.vfl.pages_per_sublk + p)
                print "Found most recent ctx"
                break
        if not ctx:
            print "FTL_restore fail did not find ctx"
            raise
        blockMap = {}
        self.nonSequential = {}
        print "FTL_restore in progress ..."
        # map each virtual superblock to (logical block, usn) from its first
        # readable user page
        for sblock in xrange(self.vfl.userSuBlksTotal + 23):
            for page in xrange(self.vfl.pages_per_sublk):
                s, d = self.vfl.read_single_page(sblock * self.vfl.pages_per_sublk + page)
                if not s:
                    continue
                if s.type >= FTL_CTX_TYPE and s.type <= FTL_CTX_TYPE_MAX:
                    break
                if s.type != USER_TYPE and s.type != USER_LAST_TYPE:
                    print "Weird page type %x at %x %x" % (s.type, sblock, page)
                    continue
                if s.lpn % self.vfl.pages_per_sublk != page:
                    print "Block %d non sequential" % sblock
                    self.nonSequential[sblock] = 1
                blockMap[sblock] = (s.lpn / self.vfl.pages_per_sublk, s.usn)
                break
        # group virtual blocks by the logical block they claim to hold
        z = dict([(i, [(a, blockMap[a][1]) for a in blockMap.keys() if blockMap[a][0] ==i]) for i in xrange(self.vfl.userSuBlksTotal)])
        for k,v in z.items():
            if len(v) == 2:
                print k, v
                vbA, usnA = v[0]
                vbB, usnB = v[1]
                if usnA > usnB: #smallest USN is map block, highest log block
                    self.pawMapTable[k] = vbB
                    self.restoreLogBlock(k, vbA)
                else:
                    self.pawMapTable[k] = vbA
                    self.restoreLogBlock(k, vbB)
            elif len(v) > 2:
                # more than two candidates should be impossible (map + one log)
                raise Exception("fufu", k, v)
            else:
                self.pawMapTable[k] = v[0][0]
        self.vfl.nand.cacheData("pLogs", self.pLogs)
        self.vfl.nand.cacheData("pawMapTable", self.pawMapTable)

    def restoreLogBlock(self, lbn, vbn):
        """Rebuild the page-offset map of a log block by reading its spare data."""
        log = {"wVbn": vbn, "wPageOffsets": {}}
        for page in xrange(self.vfl.pages_per_sublk):
            s, d = self.vfl.read_single_page(vbn * self.vfl.pages_per_sublk + page)
            if not s:
                # log blocks are written sequentially; first unreadable page ends it
                break
            log["wPageOffsets"][s.lpn % self.vfl.pages_per_sublk] = page
        self.pLogs[lbn] = log

    def mapPage(self, lbn, offset):
        """Translate (logical block, page offset) to a virtual page number.

        Prefers the log block when it holds the page; falls back to the map
        block; 0xFFFFFFFF means unmapped.
        """
        if self.pLogs.has_key(lbn):
            if self.pLogs[lbn]["wPageOffsets"].has_key(offset):
                offset = self.pLogs[lbn]["wPageOffsets"][offset]
                #print "mapPage got log %d %d" % (lbn, offset)
                return self.pLogs[lbn]["wVbn"] * self.vfl.pages_per_sublk + offset
        if not self.pawMapTable.has_key(lbn):
            return 0xFFFFFFFF
        return self.pawMapTable[lbn] * self.vfl.pages_per_sublk + offset

    def readLPN(self, lpn, key=None):
        """Read (and optionally decrypt) one logical page.

        Returns an all-FF page for unmapped LPNs, None on read failure, and
        raises when the spare's recorded LPN disagrees with the request.
        """
        lbn = lpn / self.vfl.pages_per_sublk
        offset = lpn % self.vfl.pages_per_sublk
        vpn = self.mapPage(lbn, offset)
        if vpn == 0xFFFFFFFF:
            print "lbn not found %d" % lbn
            return "\xFF" * self.nand.pageSize
        s,d = self.vfl.read_single_page(vpn, key, lpn)
        if not s:
            return None
        if s.lpn != lpn:
            raise Exception("FTL translation FAIL spare lpn=%d vs expected %d" % (s.lpn, lpn))
        return d

View File

@@ -0,0 +1,425 @@
from crypto.aes import AESdecryptCBC
from firmware.img2 import IMG2
from firmware.img3 import Img3, extract_img3s
from firmware.scfg import parse_SCFG
from hfs.emf import EMFVolume
from hfs.hfs import HFSVolume
from image import NANDImageSplitCEs, NANDImageFlat
from keystore.effaceable import check_effaceable_header, EffaceableLockers
from legacyftl import FTL
from partition_tables import GPT_partitions, parse_lwvm, parse_mbr, parse_gpt, \
APPLE_ENCRYPTED
from progressbar import ProgressBar
from remote import NANDRemote, IOFlashStorageKitClient
from structs import *
from util import sizeof_fmt, write_file, load_pickle, save_pickle, hexdump, \
makedirs
from util.bdev import FTLBlockDevice
from vfl import VFL
from vsvfl import VSVFL
from yaftl import YAFTL
import math
import os
import plistlib
import struct
def ivForPage(page):
    """Generate the 16-byte AES-CBC IV for a NAND page number.

    Runs the page number through the H2FMI whitening LFSR (taps 0x80000061)
    four times, packing each intermediate state as a little-endian 32-bit
    word. Fix: the IV is assembled with b"".join instead of repeated
    str += bytes concatenation (quadratic, and a TypeError under Python 3);
    output is byte-identical.
    """
    words = []
    for _ in range(4):
        if page & 1:
            page = 0x80000061 ^ (page >> 1)
        else:
            page = page >> 1
        words.append(struct.pack("<L", page))
    return b"".join(words)
#iOS 3
def getEMFkeyFromCRPT(data, key89B):
    """Extract the iOS 3 EMF filesystem key from a 'tprc' structure.

    The structure (magic "tprc") carries an AES-CBC blob encrypted with the
    device's key 0x89B; the decrypted payload starts with "TPRC" and holds
    the 32-byte EMF key at offset 16. Raises AssertionError on a bad magic
    or a wrong key89B.
    """
    assert data.startswith("tprc")
    decrypted = AESdecryptCBC(data[4:0x44], key89B)
    assert decrypted.startswith("TPRC"), "wrong key89B"
    #last_byte = struct.unpack("<Q", decrypted[4:4+8])[0]
    return decrypted[16:16+32]
class NAND(object):
H2FMI_HASH_TABLE = gen_h2fmi_hash_table()
def __init__(self, filename, device_infos, ppn=False):
self.device_infos = device_infos
self.partition_table = None
self.lockers = {}
self.iosVersion = 0
self.hasMBR = False
self.metadata_whitening = False
self.filename = filename
self.encrypted = device_infos["hwModel"] not in ["M68AP", "N45AP", "N82AP", "N72AP"]
self.initGeometry(device_infos["nand"])
if os.path.basename(filename).startswith("ce_"):
self.image = NANDImageSplitCEs(os.path.dirname(filename), device_infos["nand"])
elif filename == "remote":
self.image = NANDRemote(self.pageSize, self.metaSize, self.pagesPerBlock, self.bfn)
else:
self.image = NANDImageFlat(filename, device_infos["nand"])
s, page0 = self.readPage(0,0)
self.nandonly = (page0 != None) and page0.startswith("ndrG")
if self.nandonly:
self.encrypted = True
magics = ["DEVICEINFOBBT"]
nandsig = None
if page0 and page0[8:14] == "Darwin":
print "Found old style signature", page0[:8]
nandsig = page0
else:
magics.append("NANDDRIVERSIGN")
#sp0 = {}
sp0 = self.readSpecialPages(0, magics)
print "Found %s special pages in CE 0" % (", ".join(sp0.keys()))
if not self.nandonly:
print "Device does not boot from NAND (=> has a NOR)"
vfltype = '1' #use VSVFL by default
if not nandsig:
nandsig = sp0.get("NANDDRIVERSIGN")
if not nandsig:
print "NANDDRIVERSIGN not found, assuming metadata withening = %d" % self.metadata_whitening
else:
nSig, flags = struct.unpack("<LL", nandsig[:8])
#assert nandsig[3] == chr(0x43)
vfltype = nandsig[1]
self.metadata_whitening = (flags & 0x10000) != 0
print "NAND signature 0x%x flags 0x%x withening=%d, epoch=%s" % (nSig, flags, self.metadata_whitening, nandsig[0])
if not self.nandonly:
if self.device_infos.has_key("lockers"):
self.lockers = EffaceableLockers(self.device_infos.lockers.data)
else:
unit = self.findLockersUnit()
if unit:
self.lockers = EffaceableLockers(unit[0x40:])
self.lockers.display()
if not self.device_infos.has_key("lockers"):
self.device_infos.lockers = plistlib.Data(unit[0x40:0x40+960])
EMF = self.getEMF(device_infos["key89B"].decode("hex"))
dkey = self.getDKey(device_infos["key835"].decode("hex"))
self.device_infos.EMF = EMF.encode("hex")
self.device_infos.DKey = dkey.encode("hex")
deviceuniqueinfo = sp0.get("DEVICEUNIQUEINFO")
if not deviceuniqueinfo:
print "DEVICEUNIQUEINFO not found"
else:
scfg = parse_SCFG(deviceuniqueinfo)
print "Found DEVICEUNIQUEINFO, serial number=%s" % scfg.get("SrNm","SrNm not found !")
if vfltype == '0':
print "Using legacy VFL"
self.vfl = VFL(self)
self.ftl = FTL(self, self.vfl)
elif not ppn:
print "Using VSVFL"
self.vfl = VSVFL(self)
self.ftl = YAFTL(self.vfl)
def initGeometry(self, d):
self.metaSize = d.get("meta-per-logical-page", 0)
if self.metaSize == 0:
self.metaSize = 12
dumpedPageSize = d.get("dumpedPageSize", d["#page-bytes"] + self.metaSize + 8)
self.dump_size= d["#ce"] * d["#ce-blocks"] * d["#block-pages"] * dumpedPageSize
self.totalPages = d["#ce"] * d["#ce-blocks"] * d["#block-pages"]
nand_size = d["#ce"] * d["#ce-blocks"] * d["#block-pages"] * d["#page-bytes"]
hsize = sizeof_fmt(nand_size)
self.bfn = d.get("boot-from-nand", False)
self.dumpedPageSize = dumpedPageSize
self.pageSize = d["#page-bytes"]
self.bootloaderBytes = d.get("#bootloader-bytes", 1536)
self.emptyBootloaderPage = "\xFF" * self.bootloaderBytes
self.blankPage = "\xFF" * self.pageSize
self.nCEs =d["#ce"]
self.blocksPerCE = d["#ce-blocks"]
self.pagesPerBlock = d["#block-pages"]
self.pagesPerCE = self.blocksPerCE * self.pagesPerBlock
self.vendorType = d["vendor-type"]
self.deviceReadId = d.get("device-readid", 0)
self.banks_per_ce_vfl = d["banks-per-ce"]
if d.has_key("metadata-whitening"):
self.metadata_whitening = (d["metadata-whitening"].data == "\x01\x00\x00\x00")
if nand_chip_info.has_key(self.deviceReadId):
self.banks_per_ce_physical = nand_chip_info.get(self.deviceReadId)[7]
else:
#raise Exception("Unknown deviceReadId %x" % self.deviceReadId)
print "!!! Unknown deviceReadId %x, assuming 1 physical bank /CE, will probably fail" % self.deviceReadId
self.banks_per_ce_physical = 1
print "Chip id 0x%x banks per CE physical %d" % (self.deviceReadId, self.banks_per_ce_physical)
self.blocks_per_bank = self.blocksPerCE / self.banks_per_ce_physical
if self.blocksPerCE & (self.blocksPerCE-1) == 0:
self.bank_address_space = self.blocks_per_bank
self.total_block_space = self.blocksPerCE
else:
bank_address_space = next_power_of_two(self.blocks_per_bank)
self.bank_address_space = bank_address_space
self.total_block_space = ((self.banks_per_ce_physical-1)*bank_address_space) + self.blocks_per_bank
self.bank_mask = int(math.log(self.bank_address_space * self.pagesPerBlock,2))
print "NAND geometry : %s (%d CEs (%d physical banks/CE) of %d blocks of %d pages of %d bytes data, %d bytes metdata)" % \
(hsize, self.nCEs, self.banks_per_ce_physical, self.blocksPerCE, self.pagesPerBlock, self.pageSize, d["meta-per-logical-page"])
def unwhitenMetadata(self, meta, pagenum):
if len(meta) != 12:
return None
s = list(struct.unpack("<LLL", meta))
for i in xrange(3):
s[i] ^= NAND.H2FMI_HASH_TABLE[(i+pagenum) % len(NAND.H2FMI_HASH_TABLE)]
return struct.pack("<LLL", s[0], s[1],s[2])
def readBootPage(self, ce, page):
s,d=self.readPage(ce, page)
if d:
return d[:self.bootloaderBytes]
else:
#print "readBootPage %d %d failed" % (ce,page)
return self.emptyBootloaderPage
def readMetaPage(self, ce, block, page, spareType=SpareData):
return self.readBlockPage(ce, block, page, META_KEY, spareType=spareType)
def readBlockPage(self, ce, block, page, key=None, lpn=None, spareType=SpareData):
assert page < self.pagesPerBlock
pn = block * self.pagesPerBlock + page
return self.readPage(ce, pn, key, lpn, spareType=spareType)
def translateabsPage(self, page):
return page % self.nCEs, page/self.nCEs
def readAbsPage(self, page, key=None, lpn=None):
return self.readPage(page % self.nCEs, page/self.nCEs, key, lpn)
def readPage(self, ce, page, key=None, lpn=None, spareType=SpareData):
if ce > self.nCEs or page > self.pagesPerCE:
#hax physical banking
pass#raise Exception("CE %d Page %d out of bounds" % (ce, page))
if self.filename != "remote": #undo banking hax
bank = (page & ~((1 << self.bank_mask) - 1)) >> self.bank_mask
page2 = (page & ((1 << self.bank_mask) - 1))
page2 = bank * (self.blocks_per_bank) * self.pagesPerBlock + page2
spare, data = self.image.readPage(ce, page2)
else:
spare, data = self.image.readPage(ce, page)
if not data:
return None,None
if self.metadata_whitening and spare != "\x00"*12 and len(spare) == 12:
spare = self.unwhitenMetadata(spare, page)
spare = spareType.parse(spare)
if key and self.encrypted:
if lpn != None: pageNum = spare.lpn #XXX
else: pageNum = page
return spare, self.decryptPage(data, key, pageNum)
return spare, data
def decryptPage(self, data, key, pageNum):
return AESdecryptCBC(data, key, ivForPage(pageNum))
def unpackSpecialPage(self, data):
l = struct.unpack("<L", data[0x34:0x38])[0]
return data[0x38:0x38 + l]
def readSpecialPages(self, ce, magics):
print "Searching for special pages..."
specials = {}
if self.nandonly:
magics.append("DEVICEUNIQUEINFO")#, "DIAGCONTROLINFO")
magics = map(lambda s: s.ljust(16,"\x00"), magics)
lowestBlock = self.blocksPerCE - (self.blocksPerCE / 100)
for block in xrange(self.blocksPerCE - 1, lowestBlock, -1):
if len(magics) == 0:
break
#hax for physical banking
bank_offset = self.bank_address_space * (block / self.blocks_per_bank)
for page in xrange(self.pagesPerBlock,-1,-1):
page = (bank_offset + block % self.blocks_per_bank) * self.pagesPerBlock + page
s, data = self.readPage(ce, page)
if data == None:
continue
if data[:16] in magics:
self.encrypted = False
magics.remove(data[:16])
specials[data[:16].rstrip("\x00")] = self.unpackSpecialPage(data)
break
data = self.decryptPage(data, META_KEY, page)
#print data[:16]
if data[:16] in magics:
#print data[:16], block, page
self.encrypted = True
magics.remove(data[:16])
specials[data[:16].rstrip("\x00")] = self.unpackSpecialPage(data)
break
return specials
def readLPN(self, lpn, key):
return self.ftl.readLPN(lpn, key)
def readVPN(self, vpn, key=None, lpn=None):
return self.vfl.read_single_page(vpn, key, lpn)
def dumpSystemPartition(self, outputfilename):
return self.getPartitionBlockDevice(0).dumpToFile(outputfilename)
def dumpDataPartition(self, emf, outputfilename):
return self.getPartitionBlockDevice(1, emf).dumpToFile(outputfilename)
def isIOS5(self):
self.getPartitionTable()
return self.iosVersion == 5
def getPartitionTable(self):
if self.partition_table:
return self.partition_table
pt = None
for i in xrange(10):
d = self.readLPN(i, FILESYSTEM_KEY)
pt = parse_mbr(d)
if pt:
self.hasMBR = True
self.iosVersion = 3
break
gpt = parse_gpt(d)
if gpt:
off = gpt.partition_entries_lba - gpt.current_lba
d = self.readLPN(i+off, FILESYSTEM_KEY)
pt = GPT_partitions.parse(d)[:-1]
self.iosVersion = 4
break
pt = parse_lwvm(d, self.pageSize)
if pt:
self.iosVersion = 5
break
self.partition_table = pt
return pt
def getPartitionBlockDevice(self, partNum, key=None):
pt = self.getPartitionTable()
if self.hasMBR and pt[1].type == APPLE_ENCRYPTED and partNum == 1:
data = self.readLPN(pt[1].last_lba - 1, FILESYSTEM_KEY)
key = getEMFkeyFromCRPT(data, self.device_infos["key89B"].decode("hex"))
if key == None:
if partNum == 0:
key = FILESYSTEM_KEY
elif partNum == 1 and self.device_infos.has_key("EMF"):
key = self.device_infos["EMF"].decode("hex")
return FTLBlockDevice(self, pt[partNum].first_lba, pt[partNum].last_lba, key)
def getPartitionVolume(self, partNum, key=None):
bdev = self.getPartitionBlockDevice(partNum, key)
if partNum == 0:
return HFSVolume(bdev)
elif partNum == 1:
self.device_infos["dataVolumeOffset"] = self.getPartitionTable()[partNum].first_lba
return EMFVolume(bdev, self.device_infos)
def findLockersUnit(self):
if not self.nandonly:
return
for i in xrange(96,128):
for ce in xrange(self.nCEs):
s, d = self.readBlockPage(ce, 1, i)
if d and check_effaceable_header(d):
print "Found effaceable lockers in ce %d block 1 page %d" % (ce,i)
return d
def getLockers(self):
unit = self.findLockersUnit()
if unit:
return unit[0x40:0x40+960]
def getEMF(self, k89b):
return self.lockers.get_EMF(k89b)
def getDKey(self, k835):
return self.lockers.get_DKey(k835)
def readBootPartition(self, block_start, block_end):
res = ""
for i in xrange(block_start*self.pagesPerBlock, block_end*self.pagesPerBlock):
res += self.readBootPage(0, i)
return res
def get_img3s(self):
if not self.nandonly:
print "IMG3s are in NOR"
return []
blob = self.readBootPartition(8, 16)
hdr = IMG2.parse(blob[:0x100])
i = hdr.images_block * hdr.block_size + hdr.images_offset
img3s = extract_img3s(blob[i:i+hdr.images_length*hdr.block_size])
boot = self.readBootPartition(0, 1)
img3s = extract_img3s(boot[0xc00:]) + img3s
return img3s
def extract_img3s(self, outfolder=None):
if not self.nandonly:
print "IMG3s are in NOR"
return
if outfolder == None:
if self.filename != "remote": outfolder = os.path.join(os.path.dirname(self.filename), "img3")
else: outfolder = os.path.join(".", "img3")
makedirs(outfolder)
print "Extracting IMG3s to %s" % outfolder
for img3 in self.get_img3s():
#print img3.sigcheck(self.device_infos.get("key89A").decode("hex"))
print img3.shortname
write_file(outfolder+ "/%s.img3" % img3.shortname, img3.img3)
kernel = self.getPartitionVolume(0).readFile("/System/Library/Caches/com.apple.kernelcaches/kernelcache",returnString=True)
if kernel:
print "kernel"
write_file(outfolder + "/kernelcache.img3", kernel)
def extract_shsh(self, outfolder="."):
if not self.nandonly:
print "IMG3s are in NOR"
return
pass
def getNVRAM(self):
if not self.nandonly:
print "NVRAM is in NOR"
return
#TODO
nvrm = self.readBootPartition(2, 8)
def getBoot(self):
boot = self.readBootPartition(0, 1)
for i in xrange(0x400, 0x600, 16):
name = boot[i:i+4][::-1]
block_start, block_end, flag = struct.unpack("<LLL", boot[i+4:i+16])
if name == "none":
break
print name, block_start, block_end, flag
def cacheData(self, name, data):
    """Pickle *data* next to the dump file as "<filename>.<name>".

    Caching is skipped for remote devices; always returns None.
    """
    if self.filename != "remote":
        save_pickle("%s.%s" % (self.filename, name), data)
def loadCachedData(self, name):
    """Load a previously cached pickle ("<filename>.<name>").

    Returns None for remote devices or when the cache file is missing or
    unreadable (best-effort, no exception escapes).
    """
    try:
        if self.filename == "remote":
            return None
        return load_pickle(self.filename + "." + name)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any ordinary failure still yields None.
        return None
def dump(self, p):
    """Dump the whole NAND to file path *p* via IOFlashStorageKitClient.

    For remote devices the existing proxy connection is torn down first
    (ioflashstoragekit only handles one connection at a time) and
    re-created once the dump is finished.
    """
    #hax ioflashstoragekit can only handle 1 connexion
    if self.filename == "remote":
        del self.image
    ioflash = IOFlashStorageKitClient()
    ioflash.dump_nand(p)
    #restore proxy
    if self.filename == "remote":
        self.image = NANDRemote(self.pageSize, self.metaSize, self.pagesPerBlock, self.bfn)

View File

@@ -0,0 +1,132 @@
from construct import *
from zipfile import crc32
# Apple GPT partition type GUIDs in raw on-disk (mixed-endian) byte order;
# the first four bytes spell "HFS" / "EMF" little-endian.
GPT_HFS = "005346480000aa11aa1100306543ecac".decode("hex")
GPT_EMF = "00464d450000aa11aa1100306543ecac".decode("hex")
# One LwVM (Lightweight Volume Manager) partition record: type/instance
# GUIDs, byte range on the media, attributes and a UTF-16 name.
LWVM_partitionRecord = Struct("LWVM_partitionRecord",
    String("type", 16),
    String("guid", 16),
    ULInt64("begin"),
    ULInt64("end"),
    ULInt64("attribute"),
    String("name", 0x48, encoding="utf-16-le", padchar="\x00")
)
# Magic GUID identifying an LwVM header, in raw on-disk byte order.
LWVM_MAGIC = "6a9088cf8afd630ae351e24887e0b98b".decode("hex")
# LwVM header: media size, partition count, CRC32, 12 partition records
# and a 1024-entry chunk remapping table (see parse_lwvm).
LWVM_header = Struct("LWVM_header",
    String("type",16),
    String("guid", 16),
    ULInt64("mediaSize"),
    ULInt32("numPartitions"),
    ULInt32("crc32"),
    Padding(464),
    Array(12, LWVM_partitionRecord),
    Array(1024, ULInt16("chunks"))
)
# Standard EFI GPT header (normally at LBA 1).
GPT_header = Struct("GPT_header",
    String("signature", 8),
    ULInt32("revision"),
    ULInt32("header_size"),
    SLInt32("crc"), #hax to match python signed crc
    ULInt32("zero"),
    ULInt64("current_lba"),
    ULInt64("backup_lba"),
    ULInt64("first_usable_lba"),
    ULInt64("last_usable_lba"),
    String("disk_guid", 16),
    ULInt64("partition_entries_lba"),
    ULInt32("num_partition_entries"),
    ULInt32("size_partition_entry"),
    ULInt32("crc_partition_entries")
)
# One GPT partition entry.
GPT_entry = Struct("GPT_entry",
    String("partition_type_guid", 16),
    String("partition_guid", 16),
    ULInt64("first_lba"),
    ULInt64("last_lba"),
    ULInt64("attributes"),
    String("name", 72, encoding="utf-16-le", padchar="\x00"),
)
# Read entries until the all-zero type GUID terminator.
GPT_partitions = RepeatUntil(lambda obj, ctx: obj["partition_type_guid"] == "\x00"*16, GPT_entry)
# MBR partition type used by Apple for encrypted partitions.
APPLE_ENCRYPTED = 0xAE
MBR_entry = Struct("MBR_entry",
    Byte("status"),
    Bytes("chs_start",3),
    Byte("type"),
    Bytes("chs_last",3),
    ULInt32("lba_start"),
    ULInt32("num_sectors")
)
# Classic MBR sector: 440 bytes of boot code, 4 partition entries, 0xAA55 magic.
MBR = Struct("MBR",
    String("code",440),
    ULInt32("signature"),
    ULInt16("zero"),
    Array(4, MBR_entry),
    OneOf(ULInt16("magic"), [0xAA55])
)
def parse_mbr(data):
try:
mbr = MBR.parse(data)
if mbr.MBR_entry[0].type == 0xEE:
print "Found protective MBR"
return None
res = mbr.MBR_entry[:2]
for p in res:
p.first_lba = p.lba_start
p.last_lba = p.lba_start + p.num_sectors
return res
except:
return None
def parse_gpt(data):
    """Parse and CRC-check a GPT header; return it, or None if invalid.

    The "crc" field is declared signed (SLInt32) so it compares directly
    with the signed value Python 2's crc32 returns.
    """
    gpt = GPT_header.parse(data)
    if gpt.signature != "EFI PART":
        return None
    print "Found GPT header current_lba=%d partition_entries_lba=%d" % (gpt.current_lba, gpt.partition_entries_lba)
    assert gpt.partition_entries_lba > gpt.current_lba
    # The CRC is computed over the header with its own crc field zeroed.
    # NOTE(review): gpt.crc is left zeroed in the returned object.
    check = gpt.crc
    gpt.crc = 0
    actual = crc32(GPT_header.build(gpt))
    if actual != check:
        print "GPT crc check fail %d vs %d" % (actual, check)
        return None
    return gpt
def parse_lwvm(data, pageSize):
    """Parse an LwVM header and return its partition records, or None.

    The CRC32 is checked over the first 0x1000 bytes with the crc field
    (bytes 44-48) zeroed out. first_lba/last_lba are synthesized for the
    first two partitions from the chunk remapping table so callers can
    treat them like MBR/GPT entries.
    """
    try:
        hdr = LWVM_header.parse(data)
        if hdr.type != LWVM_MAGIC:
            print "LwVM magic mismatch"
            return
        tocheck = data[:44] + "\x00\x00\x00\x00" + data[48:0x1000]
        check = crc32(tocheck) & 0xffffffff
        if check != hdr.crc32:
            return None
        print "LwVM header CRC OK"
        partitions = hdr.LWVM_partitionRecord[:hdr.numPartitions]
        deviceSize=0
        #XXX: HAAAAAAAX
        # Guess the device size class (GB) from mediaSize; each chunk then
        # appears to cover deviceSize MB (i * deviceSize MB below).
        for s in [8, 16, 32, 64, 128]:
            if hdr.mediaSize < (s* 1024*1024*1024):
                deviceSize = s
                break
        # Chunk value 0x0 marks the start of the first (system) partition,
        # 0x1000 the start of the second (data) partition.
        for i in xrange(len(hdr.chunks)):
            if hdr.chunks[i] == 0x0:
                lba0 = (i * deviceSize*1024*1024) / pageSize
                partitions[0].first_lba = lba0
                partitions[0].last_lba = lba0 + (partitions[0].end - partitions[0].begin) / pageSize
            elif hdr.chunks[i] == 0x1000:
                lbad = (i * deviceSize*1024*1024) / pageSize
                partitions[1].first_lba = lbad
                partitions[1].last_lba = lbad + (partitions[1].end - partitions[1].begin) / pageSize
        return partitions
    except:
        return None

View File

@@ -0,0 +1,99 @@
from progressbar import ProgressBar
from usbmux import usbmux
from util import hexdump, sizeof_fmt
import datetime
import hashlib
import struct
import os
# Commands understood by the on-device ioflashstoragekit helper.
CMD_DUMP = 0
CMD_PROXY = 1
# IOFlashStorage option flags for page reads.
kIOFlashStorageOptionRawPageIO = 0x002
kIOFlashStorageOptionBootPageIO = 0x100
class IOFlashStorageKitClient(object):
    """TCP client for the on-device ioflashstoragekit helper (via usbmux).

    Supports two commands: CMD_DUMP (stream the whole NAND into a local
    file) and CMD_PROXY (per-page read proxy used by NANDRemote).
    """
    def __init__(self, udid=None, host="localhost", port=2000):
        # host is stored but the connection actually goes through usbmux.
        self.host = host
        self.port = port
        self.connect(udid)
    def connect(self, udid=None):
        """Wait for the first available device and connect to the helper port.

        NOTE(review): udid is accepted but not used for device selection.
        """
        mux = usbmux.USBMux()
        mux.process(1.0)
        if not mux.devices:
            print "Waiting for iOS device"
            while not mux.devices:
                mux.process(1.0)
        if not mux.devices:
            print "No device found"
            return
        dev = mux.devices[0]
        try:
            self.s = mux.connect(dev, self.port)
        except:
            raise Exception("Connexion to device %s port %d failed" % (dev.serial, self.port))
    def send_command(self, cmd):
        # Commands are a single little-endian uint32.
        return self.s.send(struct.pack("<L", cmd))
    def dump_nand(self, filename):
        """Stream the full NAND dump into *filename*, printing progress and SHA1."""
        f = open(filename, "wb")
        self.send_command(CMD_DUMP)
        # The helper first sends the total dump size as a little-endian uint64.
        zz = self.s.recv(8)
        totalSize = struct.unpack("<Q", zz)[0]
        recvSize = 0
        print "Dumping %s NAND to %s" % (sizeof_fmt(totalSize), filename)
        pbar = ProgressBar(totalSize)
        pbar.start()
        h = hashlib.sha1()
        while recvSize < totalSize:
            pbar.update(recvSize)
            d = self.s.recv(8192*2)
            if not d or len(d) == 0:
                break
            h.update(d)
            f.write(d)
            recvSize += len(d)
        pbar.finish()
        f.close()
        print "NAND dump time : %s" % str(datetime.timedelta(seconds=pbar.seconds_elapsed))
        print "SHA1: %s" % h.hexdigest()
        if recvSize != totalSize:
            print "dump_nand FAIL"
class NANDRemote(object):
    """Page-level NAND reader proxied through ioflashstoragekit (CMD_PROXY)."""
    def __init__(self, pageSize, spareSize, pagesPerBlock, bfn):
        self.spareSize = spareSize
        self.pageSize = pageSize
        self.pagesPerBlock = pagesPerBlock
        self.bootFromNand = bfn            # whether the device boots from NAND
        self.client = IOFlashStorageKitClient()
        self.client.send_command(CMD_PROXY)
    def readPage(self, ce, page):
        """Read one page; return (spareData, pageData) or (None, None) on error.

        Boot-area pages (first 16 blocks on boot-from-NAND devices) are
        read with kIOFlashStorageOptionBootPageIO and carry no spare data.
        """
        options = 0
        spareSize = self.spareSize
        if self.bootFromNand and page < 16*self.pagesPerBlock:#XXX hardcoded for now
            options = kIOFlashStorageOptionBootPageIO
            spareSize = 0
        # Request: ce, page, spare size, option flags (little-endian uint32s).
        d = struct.pack("<LLLL", ce, page, spareSize, options)
        self.client.s.send(d)
        # Reply: page data + spare data + two uint32 status words.
        torecv = self.pageSize+8+spareSize
        d = ""
        while len(d) != torecv:
            zz = self.client.s.recv(torecv)
            if not zz:
                break
            d += zz
        pageData = d[:self.pageSize]
        spareData = d[self.pageSize:self.pageSize+spareSize]
        r1,r2 = struct.unpack("<LL", d[self.pageSize+spareSize:self.pageSize+spareSize+8])
        if r1 == 0xe00002e5:
            # IOKit error status: treat the page as unreadable/blank.
            return None, None
        #print ce, page, "%x" % r1, r2, pageData[:0x10].encode("hex"), spareData[:0x10].encode("hex")
        if spareData == "":
            # Boot pages have no spare area; fake an all-FF spare for callers.
            spareData = "\xFF" * self.spareSize
        return spareData, pageData

View File

@@ -0,0 +1,83 @@
from construct.core import Struct, Union
from construct.macros import *
#hardcoded iOS keys
# Static AES keys shared across devices: META_KEY is used when reading FTL
# metadata pages (see YAFTL_readPage default key); FILESYSTEM_KEY presumably
# whitens filesystem pages — confirm against openiBoot.
META_KEY = "92a742ab08c969bf006c9412d3cc79a5".decode("hex")
FILESYSTEM_KEY = "f65dae950e906c42b254cc58fc78eece".decode("hex")
def next_power_of_two(z):
    """Return the smallest power of two >= z (1 for z <= 1)."""
    result = 1
    while result < z:
        result *= 2
    return result
def CEIL_DIVIDE(val, amt):
    """Integer ceiling division: smallest integer >= val/amt.

    Uses explicit floor division (//) — identical to / on Python 2 ints,
    but still an int (not a float) if ever run under Python 3.
    """
    return (val + amt - 1) // amt
#from openiboot/plat-s5l8920/h2fmi.c
#blocks_per_ce, pages_per_block, bytes_per_page, bytes_per_spare, unk5, unk6, unk7, banks_per_ce, unk9
#some values change in openiboot/plat-a4/h2fmi.c, but banks_per_ce is ok
# Per-chip geometry table keyed by the NAND device read id.
nand_chip_info = {
    0x7294D7EC : [ 0x1038, 0x80, 0x2000, 0x1B4, 0xC, 0, 8, 1, 0 ],
    0x72D5DEEC : [ 0x2070, 0x80, 0x2000, 0x1B4, 0xC, 0, 8, 2, 0 ],
    0x29D5D7EC : [ 0x2000, 0x80, 0x1000, 0xDA, 8, 0, 2, 2, 0 ],
    0x2994D5EC : [ 0x1000, 0x80, 0x1000, 0xDA, 8, 0, 2, 1, 0 ],
    0xB614D5EC : [ 0x1000, 0x80, 0x1000, 0x80, 4, 0, 2, 1, 0 ],
    0xB655D7EC : [ 0x2000, 0x80, 0x1000, 0x80, 4, 0, 2, 2, 0 ],
    0xB614D5AD : [ 0x1000, 0x80, 0x1000, 0x80, 4, 0, 3, 1, 0 ],
    0x3294E798 : [ 0x1004, 0x80, 0x2000, 0x1C0, 0x10, 0, 1, 1, 0 ],
    0xBA94D598 : [ 0x1000, 0x80, 0x1000, 0xDA, 8, 0, 1, 1, 0 ],
    0xBA95D798 : [ 0x2000, 0x80, 0x1000, 0xDA, 8, 0, 1, 2, 0 ],
    0x3294D798 : [ 0x1034, 0x80, 0x2000, 0x178, 8, 0, 1, 1, 0 ],
    0x3295DE98 : [ 0x2068, 0x80, 0x2000, 0x178, 8, 0, 1, 2, 0 ],
    0x3295EE98 : [ 0x2008, 0x80, 0x2000, 0x1C0, 0x18, 0, 1, 2, 0 ],
    0x3E94D789 : [ 0x2000, 0x80, 0x1000, 0xDA, 0x10, 0, 5, 1, 0 ],
    0x3ED5D789 : [ 0x2000, 0x80, 0x1000, 0xDA, 8, 0, 6, 2, 0 ],
    0x3ED5D72C : [ 0x2000, 0x80, 0x1000, 0xDA, 8, 0, 5, 2, 0 ],
    0x3E94D72C : [ 0x2000, 0x80, 0x1000, 0xDA, 0xC, 0, 7, 1, 0 ],
    0x4604682C : [ 0x1000, 0x100, 0x1000, 0xE0, 0xC, 0, 7, 1, 0 ],
    0x3294D745 : [ 0x1000, 0x80, 0x2000, 0x178, 8, 0, 9, 1, 0 ],
    0x3295DE45 : [ 0x2000, 0x80, 0x2000, 0x178, 8, 0, 9, 2, 0 ],
    0x32944845 : [ 0x1000, 0x80, 0x2000, 0x1C0, 8, 0, 9, 1, 0 ],
    0x32956845 : [ 0x2000, 0x80, 0x2000, 0x1C0, 8, 0, 9, 2, 0 ],
}
#https://github.com/iDroid-Project/openiBoot/blob/master/openiboot/plat-a4/h2fmi.c
def gen_h2fmi_hash_table():
    """Generate the 256-entry table used by the H2FMI NAND controller.

    Each entry advances a linear congruential generator
    (x = 0x19660D*x + 0x3C6EF35F mod 2^32) 763 times (1 + 762) from the
    previous state, starting at the fixed seed 0x50F4546A — matching
    openiBoot's implementation.
    """
    val = 0x50F4546A;
    h2fmi_hash_table = [0]*256
    for i in xrange(256):
        val = ((0x19660D * val) + 0x3C6EF35F) & 0xffffffff;
        for j in xrange(762):
            val = ((0x19660D * val) + 0x3C6EF35F) & 0xffffffff;
        h2fmi_hash_table[i] = val & 0xffffffff
    return h2fmi_hash_table
# Page types (as defined in the spare data "type" bitfield)
PAGETYPE_INDEX = 0x4 #Index block indicator
PAGETYPE_LBN = 0x10 # User data
PAGETYPE_FTL_CLEAN = 0x20 # FTL context (unmounted, clean)
PAGETYPE_VFL = 0x80 #/ VFL context
# Per-page spare ("out of band") metadata: logical page number, update
# sequence number and the page type flags above.
SpareData = Struct("SpareData",
    ULInt32("lpn"),
    ULInt32("usn"),
    ULInt8("field_8"),
    ULInt8("type"),
    ULInt16("field_A")
)
# Block status (as defined in the BlockStruct structure)
BLOCKSTATUS_ALLOCATED = 0x1
BLOCKSTATUS_FTLCTRL = 0x2
BLOCKSTATUS_GC = 0x4
BLOCKSTATUS_CURRENT = 0x8
BLOCKSTATUS_FTLCTRL_SEL = 0x10
BLOCKSTATUS_I_GC = 0x20
BLOCKSTATUS_I_ALLOCATED = 0x40
BLOCKSTATUS_I_CURRENT = 0x80
BLOCKSTATUS_FREE = 0xFF
# Error codes returned by the lower NAND layers.
ERROR_ARG = 0x80000001
ERROR_NAND = 0x80000002
ERROR_EMPTY = 0x80000003

View File

@@ -0,0 +1,188 @@
from array import array
from construct.core import Struct, Union
from construct.macros import *
from structs import next_power_of_two, CEIL_DIVIDE, PAGETYPE_VFL
import struct
"""
https://github.com/iDroid-Project/openiBoot/blob/master/plat-s5l8900/includes/s5l8900/ftl.h
https://github.com/iDroid-Project/openiBoot/blob/master/plat-s5l8900/ftl.c
https://github.com/iDroid-Project/openiBoot/blob/master/plat-s5l8900/nand.c
static const NANDDeviceType SupportedDevices[] = {
"""
SupportedDevices = {0x2555D5EC: [8192, 128, 4, 64, 4, 2, 4, 2, 7744, 4, 6],
0xB614D5EC: [4096, 128, 8, 128, 4, 2, 4, 2, 3872, 4, 6],
0xB655D7EC: [8192, 128, 8, 128, 4, 2, 4, 2, 7744, 4, 6],
0xA514D3AD: [4096, 128, 4, 64, 4, 2, 4, 2, 3872, 4, 6],
0xA555D5AD: [8192, 128, 4, 64, 4, 2, 4, 2, 7744, 4, 6],
0xB614D5AD: [4096, 128, 8, 128, 4, 2, 4, 2, 3872, 4, 6],
0xB655D7AD: [8192, 128, 8, 128, 4, 2, 4, 2, 7744, 4, 6],
0xA585D598: [8320, 128, 4, 64, 6, 2, 4, 2, 7744, 4, 6],
0xBA94D598: [4096, 128, 8, 216, 6, 2, 4, 2, 3872, 8, 8],
0xBA95D798: [8192, 128, 8, 216, 6, 2, 4, 2, 7744, 8, 8],
0x3ED5D789: [8192, 128, 8, 216, 4, 2, 4, 2, 7744, 8, 8],
0x3E94D589: [4096, 128, 8, 216, 4, 2, 4, 2, 3872, 8, 8],
0x3ED5D72C: [8192, 128, 8, 216, 4, 2, 4, 2, 7744, 8, 8],
0x3E94D52C: [4096, 128, 8, 216, 4, 2, 4, 2, 3872, 8, 8]
}
# On-NAND VFL context (s5l8900 era). Checksummed by vfl_checksum over the
# serialized struct minus the trailing checksum1/checksum2 (8 bytes).
_vfl_vfl_context = Struct("_vfl_vfl_context",
    ULInt32("usn_inc"),
    Array(3, ULInt16("control_block")),
    ULInt16("unk1"),
    ULInt32("usn_dec"),
    ULInt16("active_context_block"),
    ULInt16("next_context_page"),
    ULInt16("unk2"),
    ULInt16("field_16"),
    ULInt16("field_18"),
    ULInt16("num_reserved_blocks"),
    ULInt16("reserved_block_pool_start"),
    ULInt16("total_reserved_blocks"),
    Array(820, ULInt16("reserved_block_pool_map")),
    Array(282, ULInt8("bad_block_table")),
    Array(4, ULInt16("vfl_context_block")),
    ULInt16("remapping_schedule_start"),
    Array(0x48, ULInt8("unk3")),
    ULInt32("version"),
    ULInt32("checksum1"),
    ULInt32("checksum2")
)
# Spare-area layout for VFL/VSVFL pages: the union overlays the user view
# (logicalPageNumber/usn) with the metadata view (usnDec/idx).
_vfl_vsvfl_spare_data = Struct("_vfl_vsvfl_spare_data",
    Union("foo",
        Struct("user",ULInt32("logicalPageNumber"),ULInt32("usn")),
        Struct("meta",ULInt32("usnDec"),ULInt16("idx"), ULInt8("field_6"), ULInt8("field_7"))
    ),
    ULInt8("type2"),
    ULInt8("type1"),
    ULInt8("eccMark"),
    ULInt8("field_B"),
)
def vfl_checksum(data):
    """Return the (sum, xor) checksum pair used by VFL/VSVFL contexts.

    Interprets *data* as native-endian 32-bit words; the running 32-bit
    sum and xor are finalized with the 0xAABBCCDD constant.
    """
    total = 0
    acc_xor = 0
    for word in array("I", data):
        total = (total + word) & 0xffffffff
        acc_xor = (acc_xor ^ word) & 0xffffffff
    return (total + 0xAABBCCDD) & 0xffffffff, (acc_xor ^ 0xAABBCCDD) & 0xffffffff
def vfl_check_checksum(ctx, ctxtype):
    # Recompute the checksums over the serialized context minus its trailing
    # checksum1/checksum2 fields (8 bytes) and compare with stored values.
    c1, c2 = vfl_checksum(ctxtype.build(ctx)[:-8])
    return c1 == ctx.checksum1 and c2 == ctx.checksum2
class VFL(object):
    """Virtual Flash Layer for s5l8900-era devices.

    Scans each CE for the most recent on-NAND VFL context and exposes
    virtual-page reads with bad-block remapping on top of the raw NAND.
    """
    def __init__(self, nand):
        self.nand = nand
        #XXX check
        self.banks_total = nand.nCEs * nand.banks_per_ce_physical
        self.num_ce = nand.nCEs
        self.banks_per_ce = nand.banks_per_ce_physical
        self.blocks_per_ce = nand.blocksPerCE
        self.pages_per_block = nand.pagesPerBlock
        self.pages_per_block_2 = next_power_of_two(self.pages_per_block)
        self.pages_per_sublk = self.pages_per_block * self.banks_per_ce * self.num_ce
        self.blocks_per_bank = self.blocks_per_ce / self.banks_per_ce
        self.blocks_per_bank_vfl = self.blocks_per_ce / self.banks_per_ce
        self.vendorType = nand.vendorType
        self.fs_start_block = 5
        #field_4 = 5;
        if not SupportedDevices.has_key(nand.deviceReadId):
            raise Exception("VFL: unsupported device 0x%x" % nand.deviceReadId)
        # Index 8 of the device table is the user super-block count.
        userSuBlksTotal = self.userSuBlksTotal = SupportedDevices[nand.deviceReadId][8]#7744
        userPagesTotal = userSuBlksTotal * self.pages_per_sublk
        suBlksTotal = self.blocks_per_ce
        FTLData_field_2 = suBlksTotal - userSuBlksTotal - 28
        print suBlksTotal, userSuBlksTotal, FTLData_field_2
        FTLData_field_4 = FTLData_field_2 + 5
        self.FTLData_field_4 = FTLData_field_4
        #FTLData_sysSuBlks = FTLData_field_2 + 4
        #FTLData_field_6 = 3
        #FTLData_field_8 = 23
        self.vflContexts = []
        self.bbt = []
        self.current_version = 0
        self.context = None
        reserved_blocks = 0
        fs_start_block = reserved_blocks+10 #XXX
        for ce in xrange(self.num_ce):
            # Locate any valid VFL context in the first blocks of this CE.
            for b in xrange(reserved_blocks, fs_start_block):
                s, d = nand.readMetaPage(ce, b, 0, _vfl_vsvfl_spare_data)
                if not d:
                    continue
                vflctx = _vfl_vfl_context.parse(d)
                if not vfl_check_checksum(vflctx, _vfl_vfl_context):
                    vflctx = None
                    continue
                break
            # NOTE(review): if no block yields a valid context, vflctx is
            # None (or unbound on the very first CE) and the loop below
            # fails — unlike VSVFL there is no explicit check here.
            # The context block with the lowest non-zero usnDec is the newest.
            MostRecentVFLCxtBlock = -1
            minUsn = 0xFFFFFFFF
            for b in vflctx.vfl_context_block:
                s, d = nand.readMetaPage(ce, b, 0, _vfl_vsvfl_spare_data)
                if not d:
                    continue
                if s.foo.meta.usnDec > 0 and s.foo.meta.usnDec <= minUsn:
                    minUsn = s.foo.meta.usnDec;
                    MostRecentVFLCxtBlock = b
            if MostRecentVFLCxtBlock == -1:
                print "MostRecentVFLCxtBlock == -1"
                return
            # Keep the last valid context page written in that block.
            last = None
            for pageNum in xrange(0, self.pages_per_block, 1):
                s,d = nand.readMetaPage(ce, MostRecentVFLCxtBlock, pageNum, _vfl_vsvfl_spare_data)
                if not d:
                    break
                vflctx = _vfl_vfl_context.parse(d)
                if vfl_check_checksum(vflctx, _vfl_vfl_context):
                    last = vflctx
            if not last:
                raise Exception("VFL open FAIL 1")
            self.vflContexts.append(last)
            if last.version == 1 and last.usn_inc >= self.current_version:
                self.current_version = last.usn_inc
                self.context = last
        if not self.context:
            raise Exception("VFL open FAIL")
        print "VFL context open OK"
    def VFL_get_FTLCtrlBlock(self):
        # Return the FTL control block list from the most recent context.
        for ctx in self.vflContexts:
            if ctx.usn_inc == self.current_version:
                return ctx.control_block
    def vfl_is_good_block(self, bbt, block):
        # NOTE(review): block is divided by 8 twice (index = block/8 then
        # bbt[index / 8]) — differs from VSVFL's bbt[block/8]; verify
        # against openiBoot before relying on this.
        if block > self.blocks_per_ce:
            raise Exception("vfl_is_good_block block %d out of bounds" % block)
        index = block/8
        return ((bbt[index / 8] >> (7 - (index % 8))) & 0x1) == 0x1
    def virtual_block_to_physical_block(self, ce, pBlock):
        # Good blocks map 1:1; bad blocks are searched in the reserved pool.
        if self.vfl_is_good_block(self.vflContexts[ce].bad_block_table, pBlock):
            return pBlock
        ctx = self.vflContexts[ce]
        for pwDesPbn in xrange(0, ctx.num_reserved_blocks):
            if ctx.reserved_block_pool_map[pwDesPbn] == pBlock:
                if pwDesPbn > self.blocks_per_ce:
                    raise Exception("Destination physical block for remapping is greater than number of blocks per bank!")
                return ctx.reserved_block_pool_start + pwDesPbn
        print "Bad block %d not remapped" % pBlock
        return pBlock
    def virtual_page_number_to_virtual_address(self, vpn):
        # Split a virtual page number into (bank, block, page-in-block).
        vbank = vpn % self.num_ce
        vblock = vpn / self.pages_per_sublk
        vpage = (vpn / self.num_ce) % self.pages_per_block
        return vbank, vblock, vpage
    def read_single_page(self, vpn, key=None, lpn=None):
        # User pages start after the FTL system area (FTLData_field_4 super-blocks).
        vpn += self.pages_per_sublk * self.FTLData_field_4
        vbank, vblock, vpage = self.virtual_page_number_to_virtual_address(vpn)
        pblock = self.virtual_block_to_physical_block(vbank, vblock)
        #print "VFL read_single_page %d => %d, %d" % (vpn,ce,pPage)
        return self.nand.readPage(vbank, pblock*self.nand.pagesPerBlock + vpage, key, lpn)

View File

@@ -0,0 +1,193 @@
from construct import *
from structs import next_power_of_two, PAGETYPE_VFL, CEIL_DIVIDE
from vfl import vfl_check_checksum, _vfl_vsvfl_spare_data
"""
https://github.com/iDroid-Project/openiBoot/blob/master/vfl-vsvfl/vsvfl.c
https://github.com/iDroid-Project/openiBoot/blob/master/vfl-vsvfl/includes/vfl/vsvfl.h
"""
_vfl_vsvfl_context = Struct("_vfl_vsvfl_context",
ULInt32("usn_inc"),
ULInt32("usn_dec"),
ULInt32("ftl_type"),
ULInt16("usn_block"),
ULInt16("usn_page"),
ULInt16("active_context_block"),
ULInt16("write_failure_count"),
ULInt16("bad_block_count"),
Array(4, ULInt8("replaced_block_count")),
ULInt16("num_reserved_blocks"),
ULInt16("field_1C"),
ULInt16("total_reserved_blocks"),
Array(6, ULInt8("field_20")),
Array(820, ULInt16("reserved_block_pool_map")),
Array(4, ULInt16("vfl_context_block")),
ULInt16("usable_blocks_per_bank"),
ULInt16("reserved_block_pool_start"),
Array(3, ULInt16("control_block")),
ULInt16("scrub_list_length"),
Array(20, ULInt16("scrub_list")),
Array(4, ULInt32("field_6CA")),
ULInt32("vendor_type"),
Array(204, ULInt8("field_6DE")),
ULInt16("remapping_schedule_start"),
Array(0x48, ULInt8("unk3")),
ULInt32("version"),
ULInt32("checksum1"),
ULInt32("checksum2")
)
class VSVFL(object):
    """Vendor-specific VFL (VSVFL) used by newer (A4-era) devices.

    Finds the most recent on-NAND context per CE, builds a per-CE
    bad-block bitmap and translates virtual page numbers to physical
    (ce, page) addresses using vendor-specific interleaving schemes.
    """
    def __init__(self, nand):
        self.nand = nand
        self.banks_per_ce_vfl = 1
        # Some vendor layouts expose two VFL banks per physical CE.
        if self.nand.vendorType in [0x100010, 0x100014, 0x120014, 0x150011]:
            self.banks_per_ce_vfl = 2
        self.banks_total = nand.nCEs * self.banks_per_ce_vfl
        self.num_ce = nand.nCEs
        self.banks_per_ce = nand.banks_per_ce_physical
        self.blocks_per_ce = nand.blocksPerCE
        self.pages_per_block = nand.pagesPerBlock
        self.pages_per_block_2 = next_power_of_two(self.pages_per_block)
        self.pages_per_sublk = self.pages_per_block * self.banks_per_ce_vfl * self.num_ce
        self.blocks_per_bank = self.blocks_per_ce / self.banks_per_ce
        self.blocks_per_bank_vfl = self.blocks_per_ce / self.banks_per_ce_vfl
        self.vendorType = nand.vendorType
        # Select the vendor-specific virtual->physical address scheme.
        # NOTE(review): 0x150011 maps to ..._100014 and the 0x100014 family
        # maps to ..._150011 — looks swapped relative to the method names;
        # verify against openiBoot's vsvfl.c.
        if self.vendorType == 0x10001:
            self.virtual_to_physical = self.virtual_to_physical_10001
        elif self.vendorType == 0x150011:
            self.virtual_to_physical = self.virtual_to_physical_100014
        elif self.vendorType in [0x100010, 0x100014, 0x120014]:
            self.virtual_to_physical = self.virtual_to_physical_150011
        else:
            raise Exception("VSVFL: unsupported vendor 0x%x" % self.vendorType)
        self.bank_address_space = nand.bank_address_space
        self.vflContexts = []
        self.bbt = []
        self.current_version = 0
        reserved_blocks = 0
        if self.nand.bfn:
            reserved_blocks = 16
        fs_start_block = reserved_blocks+16 #XXX
        for ce in xrange(self.num_ce):
            # Locate any valid VSVFL context in the first blocks of this CE.
            vflctx = None
            for b in xrange(reserved_blocks, fs_start_block):
                s, d = nand.readMetaPage(ce, b, 0, _vfl_vsvfl_spare_data)
                if not d:
                    continue
                vflctx = _vfl_vsvfl_context.parse(d)
                if not vfl_check_checksum(vflctx, _vfl_vsvfl_context):
                    vflctx = None
                    continue
                break
            if not vflctx:
                raise Exception("Unable to find VSVFL context for CE %d" % ce)
            # The context block with the lowest non-zero usnDec is the newest.
            MostRecentVFLCxtBlock = -1
            minUsn = 0xFFFFFFFF
            for b in vflctx.vfl_context_block:
                s, d = nand.readMetaPage(ce, b, 0, _vfl_vsvfl_spare_data)
                if not d or s.type1 != PAGETYPE_VFL:
                    continue
                if s.foo.meta.usnDec > 0 and s.foo.meta.usnDec <= minUsn:
                    minUsn = s.foo.meta.usnDec;
                    MostRecentVFLCxtBlock = b
            if MostRecentVFLCxtBlock == -1:
                print "MostRecentVFLCxtBlock == -1"
                return
            # Keep the last valid context page written in that block.
            last = None
            for pageNum in xrange(0, self.pages_per_block, 1):
                s,d = nand.readMetaPage(ce, MostRecentVFLCxtBlock, pageNum, _vfl_vsvfl_spare_data)
                if not d or s.type1 != PAGETYPE_VFL:
                    break
                last = d
            vflctx = _vfl_vsvfl_context.parse(last)
            if not vfl_check_checksum(vflctx, _vfl_vsvfl_context):
                print "VSVFL checksum FAIL"
            self.vflContexts.append(vflctx)
            if vflctx.version == 2 and vflctx.usn_inc >= self.current_version:
                self.current_version = vflctx.usn_inc
                self.context = vflctx
        # NOTE(review): self.context is only assigned inside the loop; if no
        # version-2 context exists this raises AttributeError instead of
        # the intended exception below.
        if not self.context:
            raise Exception("VSVFL open FAIL")
        # Build a per-CE bad-block bitmap (1 bit per block; bit set = good).
        num_reserved = self.vflContexts[0].reserved_block_pool_start
        num_non_reserved = self.blocks_per_bank_vfl - num_reserved
        for ce in xrange(self.num_ce):
            bbt = [0xFF] * (CEIL_DIVIDE(self.blocks_per_ce, 8))
            ctx = self.vflContexts[ce]
            for bank in xrange(0, self.banks_per_ce_vfl):
                for i in xrange(0, num_non_reserved):
                    mapEntry = ctx.reserved_block_pool_map[bank*num_non_reserved + i]
                    if mapEntry == 0xFFF0:
                        continue
                    if mapEntry < self.blocks_per_ce:
                        pBlock = mapEntry
                    elif mapEntry > 0xFFF0:
                        pBlock = self.virtual_block_to_physical_block(ce + bank * self.num_ce, num_reserved + i)
                    else:
                        print "VSVFL: bad map table"
                    bbt[pBlock / 8] &= ~(1 << (pBlock % 8))
            self.bbt.append(bbt)
        print "VSVFL context open OK"
    def VFL_get_FTLCtrlBlock(self):
        # Return the FTL control block list from the most recent context.
        for ctx in self.vflContexts:
            if ctx.usn_inc == self.current_version:
                return ctx.control_block
    def virtual_to_physical_10001(self, vBank, vPage):
        # Identity scheme: one VFL bank per CE.
        return vBank, vPage
    def virtual_to_physical_100014(self, vBank, vPage):
        # Interleaved scheme: odd physical banks use the upper block half.
        pBank = vBank / self.num_ce;
        pPage = ((self.pages_per_block - 1) & vPage) | (2 * (~(self.pages_per_block - 1) & vPage))
        if (pBank & 1):
            pPage |= self.pages_per_block
        return vBank % self.num_ce, pPage
    def virtual_to_physical_150011(self, vBank, vPage):
        # Two consecutive physical blocks form one virtual block.
        pBlock = 2 * (vPage / self.pages_per_block)
        if(vBank % (2 * self.num_ce) >= self.num_ce):
            pBlock += 1
        return vBank % self.num_ce, self.pages_per_block * pBlock | (vPage % 128)
    def virtual_block_to_physical_block(self, vBank, vBlock):
        ce, pPage = self.virtual_to_physical(vBank, self.pages_per_block * vBlock)
        return pPage / self.pages_per_block
    def vfl_is_good_block(self, bbt, block):
        # bbt is the bitmap built in __init__: bit set = good block.
        if block > self.blocks_per_ce:
            raise Exception("vfl_is_good_block block %d out of bounds" % block)
        return (bbt[block / 8] & (1 << (block % 8))) != 0
    def remap_block(self, ce, pBlock):
        # Good blocks map to themselves; bad blocks are looked up in the
        # reserved pool and translated back to a physical block.
        if self.vfl_is_good_block(self.bbt[ce], pBlock):
            return pBlock
        ctx = self.vflContexts[ce]
        for pwDesPbn in xrange(0, self.blocks_per_ce - ctx.reserved_block_pool_start * self.banks_per_ce_vfl):
            if ctx.reserved_block_pool_map[pwDesPbn] == pBlock:
                vBank = ce + self.num_ce * (pwDesPbn / (self.blocks_per_bank_vfl - ctx.reserved_block_pool_start))
                vBlock = ctx.reserved_block_pool_start + (pwDesPbn % (self.blocks_per_bank_vfl - ctx.reserved_block_pool_start))
                z = self.virtual_block_to_physical_block(vBank, vBlock)
                #print "remapped block %d => %d" % (pBlock, z)
                return z
        print "Bad block %d not remapped" % pBlock
        return pBlock
    def virtual_page_number_to_physical(self, vpn):
        """Translate a virtual page number to a physical (ce, page) pair."""
        vBank = vpn % self.banks_total
        ce = vBank % self.nand.nCEs
        pBlock = self.virtual_block_to_physical_block(vBank, vpn / self.pages_per_sublk)
        pBlock = self.remap_block(ce, pBlock)
        bank_offset = self.bank_address_space * (pBlock / self.blocks_per_bank)
        page = self.pages_per_block_2 * (bank_offset + (pBlock % self.blocks_per_bank)) \
            + ((vpn % self.pages_per_sublk) / self.banks_total)
        return ce, page
    def read_single_page(self, vpn, key=None, lpn=None):
        ce, pPage = self.virtual_page_number_to_physical(vpn)
        #print "VFL read_single_page %d => %d, %d" % (vpn,ce,pPage)
        return self.nand.readPage(ce, pPage, key, lpn)

View File

@@ -0,0 +1,357 @@
from array import array
from construct.core import Struct, Union
from construct.macros import *
from progressbar import ProgressBar
from structs import *
import struct
#https://github.com/iDroid-Project/openiBoot/blob/master/openiboot/ftl-yaftl/yaftl.c
# On-NAND YaFTL context page ("CX01"): geometry and allocation state.
YAFTL_CXT = Struct("YAFTL_CXT",
    String("version", 4),
    ULInt32("unknCalculatedValue0"),
    ULInt32("totalPages"),
    ULInt32("latestUserBlock"),
    ULInt32("cxt_unkn0_usn"),
    ULInt32("latestIndexBlock"),
    ULInt32("maxIndexUsn"),
    ULInt32("blockStatsField4"),
    ULInt32("blockStatsField10"),
    ULInt32("numAllocatedBlocks"),
    ULInt32("numIAllocatedBlocks"),
    ULInt32("unk184_0xA"),
    Array(10, ULInt32("cxt_unkn1")),
    ULInt32("field_58"),
    ULInt16("tocArrayLength"),
    ULInt16("tocPagesPerBlock"),
    ULInt16("tocEntriesPerPage"),
    ULInt16("unkn_0x2A"),
    ULInt16("userPagesPerBlock"),
    ULInt16("unk64"),
    Array(11, ULInt32("cxt_unkn2")),
    ULInt8("unk188_0x63"),
)
# TOC (translation) cache entry descriptor.
TOCStruct = Struct("TOCStruct",
    ULInt32("indexPage"),
    ULInt16("cacheNum"),
    ULInt16("TOCUnkMember2"),
)
# Per-FTL block usage counters.
BlockStats = Struct("BlockStats",
    ULInt32("numAllocated"),
    ULInt32("field_4"),
    ULInt32("numValidDPages"),
    ULInt32("numIAllocated"),
    ULInt32("field_10"),
    ULInt32("numValidIPages"),
    ULInt32("numFree"),
    ULInt32("field_1C"),
)
class YAFTL(object):
def __init__(self, vfl, usn=0):
self.vfl = vfl
self.lpnToVpn = None
bytesPerPage = vfl.nand.pageSize
numBlocks = vfl.context.usable_blocks_per_bank
self.blankPage = bytesPerPage * "\x00"
self.numBlocks = numBlocks
self.tocPagesPerBlock = vfl.pages_per_sublk * 4 / bytesPerPage
if vfl.pages_per_sublk * 4 % bytesPerPage:
self.tocPagesPerBlock += 1
self.tocEntriesPerPage = bytesPerPage / 4
self.tocArrayLength = CEIL_DIVIDE(vfl.pages_per_sublk * numBlocks * 4, bytesPerPage)
self.nPagesTocPageIndices = CEIL_DIVIDE(self.tocArrayLength * 4, bytesPerPage)
self.nPagesBlockStatuses = CEIL_DIVIDE(numBlocks * 1, bytesPerPage)
self.nPagesBlockReadCounts = CEIL_DIVIDE(numBlocks * 2, bytesPerPage)
self.nPagesBlockEraseCounts = CEIL_DIVIDE(numBlocks * 4, bytesPerPage)
self.nPagesBlockValidPagesDNumbers = self.nPagesBlockReadCounts
self.nPagesBlockValidPagesINumbers = self.nPagesBlockReadCounts
self.ctrlBlockPageOffset = self.nPagesTocPageIndices \
+ self.nPagesBlockStatuses \
+ self.nPagesBlockReadCounts \
+ self.nPagesBlockEraseCounts \
+ self.nPagesBlockValidPagesDNumbers \
+ self.nPagesBlockValidPagesINumbers \
+ 2 * self.tocPagesPerBlock \
+ 2
self.totalPages = (self.numBlocks - 8) * (self.vfl.pages_per_sublk - self.tocPagesPerBlock)# - unknCalculatedValue0
self.userPagesPerBlock = self.vfl.pages_per_sublk - self.tocPagesPerBlock
maxUsn = 0
ftlCtrlBlock = -1
for b in self.vfl.VFL_get_FTLCtrlBlock():
s,d = self.YAFTL_readPage(b * self.vfl.pages_per_sublk)
if not d:
continue
if usn and s.usn > usn:
break
if s.usn > maxUsn:
maxUsn = s.usn
ftlCtrlBlock = b
if ftlCtrlBlock == -1 or not maxUsn:
print "ftlCtrlBlock not found, restore needed"
self.YAFTL_restore()
return
i = 0
maxUsn = 0
while i < self.vfl.pages_per_sublk - self.ctrlBlockPageOffset:
s,d = self.YAFTL_readPage(ftlCtrlBlock*self.vfl.pages_per_sublk + i + self.ctrlBlockPageOffset)
if not d:
if self.YAFTL_readCxtInfo(ftlCtrlBlock*self.vfl.pages_per_sublk + i):
return
print "YaFTL_readCxtInfo FAIL, restore needed maxUsn=%d" % maxUsn
self.YAFTL_restore()
return
if s and s.usn > maxUsn:
maxUsn = s.usn
i += self.ctrlBlockPageOffset + 1
print "YaFTL open fail"
self.YAFTL_restore()
def readBTOCPages(self, block, maxVal):
data = ""
for i in xrange(self.tocPagesPerBlock):
s,d = self.YAFTL_readPage((block+1) * self.vfl.pages_per_sublk - self.tocPagesPerBlock + i)
if not s:
return None
data += d
btoc = array("I",data)
for i in xrange(len(btoc)):
if btoc[i] > maxVal:
btoc[i] = 0xFFFFFFFF
return btoc
def YAFTL_restore(self):
self.lpnToVpn = self.vfl.nand.loadCachedData("yaftlrestore")
if self.lpnToVpn:
print "Found cached FTL restore information"
return
userBlocks = {}
indexBlocks = {}
print "FTL restore in progress"
pbar = ProgressBar(self.numBlocks)
pbar.start()
for b in xrange(0, self.numBlocks):
pbar.update(b)
#read fist page in block, if empty then block is empty
s,d = self.YAFTL_readPage(b * self.vfl.pages_per_sublk + 0)
if not s:
continue
if s.type == PAGETYPE_INDEX:
indexBlocks[s.usn] = b
elif s.type == PAGETYPE_LBN:
if userBlocks.has_key(s.usn):
print "Two blocks with same USN, something is weird"
userBlocks[s.usn] = b
elif s.type == PAGETYPE_FTL_CLEAN:
pass
pbar.finish()
lpnToVpn = {}
for usn in sorted(userBlocks.keys(), reverse=True):
b = userBlocks[usn]
btoc = self.readBTOCPages(b, self.totalPages)
if btoc:
for i in xrange(self.userPagesPerBlock-1,-1, -1):
if not lpnToVpn.has_key(btoc[i]):
lpnToVpn[btoc[i]] = b * self.vfl.pages_per_sublk + i
else:
print "BTOC not found for block %d (usn %d), scanning all pages" % (b, usn)
i = 0
for p in xrange(self.vfl.pages_per_sublk - self.tocPagesPerBlock -1, -1, -1):
s,d = self.YAFTL_readPage(b * self.vfl.pages_per_sublk + p)
if s:
i+= 1
if s and not lpnToVpn.has_key(s.lpn):
lpnToVpn[s.lpn] = b * self.vfl.pages_per_sublk + p
print "%d used pages in block" % i
self.vfl.nand.cacheData("yaftlrestore", lpnToVpn)
self.lpnToVpn = lpnToVpn
return lpnToVpn
def YAFTL_readCxtInfo(self, page):
s,d = self.YAFTL_readPage(page)
if not s or s.type != PAGETYPE_FTL_CLEAN:
return False
ctx = YAFTL_CXT.parse(d)
ctx.spareUsn = s.usn
if ctx.version != "CX01":
print "Wrong FTL version %s" % ctx.version
return False
self.usn = s.usn
pageToRead = page + 1;
userTOCBuffer = self.YAFTL_read_n_Page(pageToRead, self.tocPagesPerBlock)
if not userTOCBuffer:
raise(Exception("userTOCBuffer"))
pageToRead += self.tocPagesPerBlock
indexTOCBuffer = self.YAFTL_read_n_Page(pageToRead, self.tocPagesPerBlock)
pageToRead += self.tocPagesPerBlock + 1
tocArrayIndexPages = self.YAFTL_read_n_Page(pageToRead, self.nPagesTocPageIndices)
self.tocArrayIndexPages = array("I", tocArrayIndexPages)
assert self.tocArrayIndexPages.itemsize == 4
self.indexCache = {}
pageToRead += self.nPagesTocPageIndices
if False: #we don't care, we just want to read
blockStatuses = self.YAFTL_read_n_Page(pageToRead, self.nPagesBlockStatuses)
pageToRead += self.nPagesBlockStatuses
blockReadCounts = self.YAFTL_read_n_Page(pageToRead, self.nPagesBlockReadCounts)
pageToRead += self.nPagesBlockReadCounts
blockEraseCounts = self.YAFTL_read_n_Page(pageToRead, self.nPagesBlockEraseCounts)
pageToRead += self.nPagesBlockEraseCounts
validPagesINo = self.YAFTL_read_n_Page(pageToRead, self.nPagesBlockValidPagesINumbers)
pageToRead += self.nPagesBlockValidPagesINumbers
validPagesDNo = self.YAFTL_read_n_Page(pageToRead, self.nPagesBlockValidPagesDNumbers)
print "YaFTL context OK, version=%s maxIndexUsn=%d context usn=%d" % (ctx.version, ctx.maxIndexUsn, self.usn)
return True
def YAFTL_read_n_Page(self, page, n, failIfBlank=False):
r = ""
for i in xrange(0, n):
s,d = self.YAFTL_readPage(page +i)
if not d:
if failIfBlank:
return
return r
r += d
return r
def YAFTL_readPage(self, page, key=META_KEY, lpn=None):
return self.vfl.read_single_page(page, key, lpn)
def build_lpn_to_vpn(self):
lpnToVpn = {}
for p in xrange(self.totalPages):
x = self.translateLPNtoVPN(p)
if x != 0xFFFFFFFF:
lpnToVpn[p] = x
self.vfl.nand.cacheData("currentftl", lpnToVpn)
return lpnToVpn
def translateLPNtoVPN(self, lpn):
if self.lpnToVpn:
return self.lpnToVpn.get(lpn, 0xFFFFFFFF)
tocPageNum = (lpn) / self.tocEntriesPerPage
indexPage = self.tocArrayIndexPages[tocPageNum]
if indexPage == 0xffffffff:
return 0xffffffff
#print "indexPage %x" % indexPage
if self.indexCache.has_key(indexPage):
tocPageBuffer = self.indexCache[indexPage]
else:
s,tocPageBuffer = self.YAFTL_readPage(indexPage)
if not tocPageBuffer:
print "tocPageBuffer fail"
return 0xffffffff
assert s.type == PAGETYPE_INDEX
tocPageBuffer = array("I", tocPageBuffer)
self.indexCache[indexPage] = tocPageBuffer
tocEntry = tocPageBuffer[lpn % self.tocEntriesPerPage]
return tocEntry
def readLPN(self, lpn, key=None):#, nPages):
vpn = self.translateLPNtoVPN(lpn)
if vpn == 0xffffffff:
return self.blankPage
#print "tocEntry %d" % tocEntry
#print "FTL %d => %d" % (lpn, vpn)
s,d = self.YAFTL_readPage(vpn, key, lpn)
if d == None:
return self.blankPage
if s.lpn != lpn:
raise Exception("YAFTL translation FAIL spare lpn=%d vs expected %d" % (s.lpn, lpn))
return d
def YAFTL_lookup1(self):
    """Rebuild a logical->virtual page mapping by scanning every block.

    Returns (lpnToVpn, userBlocks): lpnToVpn maps each LPN to a list of
    candidate VPNs (appended in ascending-USN block order, so later
    entries come from more recently written blocks); userBlocks maps a
    block's update sequence number (USN) to the block number.
    A cached result is returned when available.
    """
    hax = self.vfl.nand.loadCachedData("YAFTL_lookup1")
    if hax:
        print "Found cached FTL lookup table"
        return hax
    userBlocks = {}
    indexBlocks = {}
    print "Building FTL lookup table v1"
    pbar = ProgressBar(self.numBlocks)
    pbar.start()
    # First pass: classify every block by the type of its first page.
    for b in xrange(0, self.numBlocks):
        pbar.update(b)
        #read fist page in block, if empty then block is empty
        s,d = self.YAFTL_readPage(b * self.vfl.pages_per_sublk + 0)
        if not s:
            continue
        if s.type == PAGETYPE_INDEX:
            indexBlocks[s.usn] = b
        elif s.type == PAGETYPE_LBN:
            if userBlocks.has_key(s.usn):
                print "Two blocks with same USN, something is weird"
            userBlocks[s.usn] = b
        elif s.type == PAGETYPE_FTL_CLEAN:
            pass#print b, "ftl block"
    pbar.finish()
    lpnToVpn = {}
    # Second pass: for each user block (oldest USN first), collect the
    # LPN of every data page, preferably from the block TOC.
    for usn in sorted(userBlocks.keys(), reverse=False):
        b = userBlocks[usn]
        btoc = self.readBTOCPages(b, self.totalPages)
        #print usn, b
        if btoc:
            for i in xrange(self.userPagesPerBlock-1,-1, -1):
                lpnToVpn.setdefault(btoc[i], []).append(b * self.vfl.pages_per_sublk + i)
        else:
            #print "btoc not found for block %d (usn %d), scanning all pages" % (b, usn)
            # No TOC: scan the block's data pages backwards and take the
            # LPN from each page's spare area.
            i = 0
            usn = -1    # NOTE(review): shadows the loop variable; used only to detect USN changes
            for p in xrange(self.vfl.pages_per_sublk - self.tocPagesPerBlock -1, -1, -1):
                s,d = self.YAFTL_readPage(b * self.vfl.pages_per_sublk + p)
                if not s:
                    break
                i+= 1
                if usn == -1:
                    usn = s.usn
                if usn != s.usn:
                    #print "Two usns in same block %d %d" % (usn, s.usn)
                    usn = s.usn
                lpnToVpn.setdefault(s.lpn, []).append(b * self.vfl.pages_per_sublk + p)
            #print "%d used pages in block" % i
    #self.vfl.nand.cacheData("YAFTL_lookup1", (lpnToVpn, userBlocks))
    return lpnToVpn, userBlocks
def YAFTL_hax2(self):
    """Brute-force FTL recovery: scan every data page of every user block.

    Returns {lpn: {usn: [vpn, ...]}} so that, for a given LPN, all page
    versions are kept keyed by the block's update sequence number.  The
    result is cached on disk for subsequent runs.
    """
    hax = self.vfl.nand.loadCachedData("YAFTL_hax2")
    if hax:
        print "Found cached FTL HAX2 information"
        return hax
    print "FTL hax2 in progress"
    pbar = ProgressBar(self.numBlocks)
    pbar.start()
    lpnToVpn = {}
    for b in xrange(0, self.numBlocks):
        pbar.update(b)
        #read fist page in block, if empty then block is empty (right?)
        s,d = self.YAFTL_readPage(b * self.vfl.pages_per_sublk + 0)
        if not s:
            continue
        if s.type == PAGETYPE_LBN:
            i = 0
            usn = -1
            # Walk the data pages (the trailing TOC pages are skipped);
            # stop at the first unreadable page.
            for p in xrange(0, self.vfl.pages_per_sublk - self.tocPagesPerBlock):
                s,d = self.YAFTL_readPage(b * self.vfl.pages_per_sublk + p)
                if not s:
                    break
                lpnToVpn.setdefault(s.lpn, {}).setdefault(s.usn, []).append(b * self.vfl.pages_per_sublk + p)
                i+= 1
    pbar.finish()
    self.vfl.nand.cacheData("YAFTL_hax2", lpnToVpn)
    return lpnToVpn
def block_lpn_to_vpn(self, block):
    """Scan the data pages of one block and return an {lpn: vpn} dict.

    Stops at the first unreadable page; when the same LPN appears twice,
    the later page wins.
    """
    mapping = {}
    base = block * self.vfl.pages_per_sublk
    dataPages = self.vfl.pages_per_sublk - self.tocPagesPerBlock
    for off in xrange(dataPages):
        spare, _ = self.YAFTL_readPage(base + off)
        if not spare:
            break
        mapping[spare.lpn] = base + off
    return mapping

View File

@@ -0,0 +1,246 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# usbmux.py - usbmux client library for Python
#
# Copyright (C) 2009 Hector Martin "marcan" <hector@marcansoft.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 or version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import socket, struct, select, sys
try:
import plistlib
haveplist = True
except:
haveplist = False
class MuxError(Exception):
    """Base error for all usbmuxd client failures."""
    pass
class MuxVersionError(MuxError):
    """Raised when the daemon speaks a different protocol version."""
    pass
class SafeStreamSocket:
    """Stream socket wrapper whose send/recv never return short.

    Both operations loop until the full byte count has been transferred;
    a peer close mid-transfer raises MuxError.
    """
    def __init__(self, address, family):
        self.sock = socket.socket(family, socket.SOCK_STREAM)
        self.sock.connect(address)
    def send(self, msg):
        # Keep pushing the unsent tail of msg until everything went out.
        done = 0
        while done < len(msg):
            n = self.sock.send(msg[done:])
            if n == 0:
                raise MuxError("socket connection broken")
            done += n
    def recv(self, size):
        # Accumulate chunks until exactly `size` bytes have been read.
        buf = ''
        while len(buf) < size:
            chunk = self.sock.recv(size - len(buf))
            if chunk == '':
                raise MuxError("socket connection broken")
            buf += chunk
        return buf
class MuxDevice(object):
    """Record describing one device currently attached to usbmuxd."""
    def __init__(self, devid, usbprod, serial, location):
        self.devid = devid      # daemon-assigned device id
        self.usbprod = usbprod  # USB product id
        self.serial = serial    # device serial / UDID
        self.location = location
    def __str__(self):
        return "<MuxDevice: ID %d ProdID 0x%04x Serial '%s' Location 0x%x>" % (
            self.devid, self.usbprod, self.serial, self.location)
class BinaryProtocol(object):
    """Version-0 (binary) usbmuxd control protocol.

    Packet layout: 16-byte header of four native-endian uint32 fields
    (length, version, type, tag) followed by a type-specific payload.
    Once `connected` is True the socket carries raw tunneled data and no
    further control packets may be exchanged.
    """
    TYPE_RESULT = 1
    TYPE_CONNECT = 2
    TYPE_LISTEN = 3
    TYPE_DEVICE_ADD = 4
    TYPE_DEVICE_REMOVE = 5
    VERSION = 0
    def __init__(self, socket):
        self.socket = socket
        self.connected = False
    def _pack(self, req, payload):
        """Serialize the payload dict of an outgoing request."""
        if req == self.TYPE_CONNECT:
            # device id + (pre-swapped) port, padded to 8 bytes
            return struct.pack("IH", payload['DeviceID'], payload['PortNumber']) + "\x00\x00"
        elif req == self.TYPE_LISTEN:
            return ""
        else:
            raise ValueError("Invalid outgoing request type %d"%req)
    def _unpack(self, resp, payload):
        """Deserialize an incoming payload into a dict."""
        if resp == self.TYPE_RESULT:
            return {'Number':struct.unpack("I", payload)[0]}
        elif resp == self.TYPE_DEVICE_ADD:
            devid, usbpid, serial, pad, location = struct.unpack("IH256sHI", payload)
            serial = serial.split("\0")[0]  # strip NUL padding
            return {'DeviceID': devid, 'Properties': {'LocationID': location, 'SerialNumber': serial, 'ProductID': usbpid}}
        elif resp == self.TYPE_DEVICE_REMOVE:
            devid = struct.unpack("I", payload)[0]
            return {'DeviceID': devid}
        else:
            # BUG FIX: this branch previously referenced the undefined
            # name `req`, raising NameError instead of the intended MuxError.
            raise MuxError("Invalid incoming request type %d"%resp)
    def sendpacket(self, req, tag, payload={}):
        """Pack and send one control packet (header + payload)."""
        payload = self._pack(req, payload)
        if self.connected:
            raise MuxError("Mux is connected, cannot issue control packets")
        length = 16 + len(payload)
        data = struct.pack("IIII", length, self.VERSION, req, tag) + payload
        self.socket.send(data)
    def getpacket(self):
        """Receive one control packet; returns (type, tag, payload dict).

        Raises MuxVersionError on a daemon version mismatch, which the
        caller uses to fall back to the plist protocol.
        """
        if self.connected:
            raise MuxError("Mux is connected, cannot issue control packets")
        dlen = self.socket.recv(4)
        dlen = struct.unpack("I", dlen)[0]
        body = self.socket.recv(dlen - 4)
        version, resp, tag = struct.unpack("III",body[:0xc])
        if version != self.VERSION:
            raise MuxVersionError("Version mismatch: expected %d, got %d"%(self.VERSION,version))
        payload = self._unpack(resp, body[0xc:])
        return (resp, tag, payload)
class PlistProtocol(BinaryProtocol):
    """Version-1 usbmuxd protocol: same framing, XML-plist payloads.

    The message type names replace the numeric codes of the binary
    protocol; on the wire every packet goes out with type TYPE_PLIST.
    """
    TYPE_RESULT = "Result"
    TYPE_CONNECT = "Connect"
    TYPE_LISTEN = "Listen"
    TYPE_DEVICE_ADD = "Attached"
    TYPE_DEVICE_REMOVE = "Detached" #???
    TYPE_PLIST = 8
    VERSION = 1
    def __init__(self, socket):
        if not haveplist:
            raise Exception("You need the plistlib module")
        BinaryProtocol.__init__(self, socket)
    def _pack(self, req, payload):
        # Payload is already a serialized plist string by the time the
        # parent sendpacket hands it here.
        return payload
    def _unpack(self, resp, payload):
        # Raw plist string; decoded by getpacket below.
        return payload
    def sendpacket(self, req, tag, payload={}):
        # NOTE(review): mutable default `payload` is mutated here; all
        # current callers pass a fresh dict.
        payload['ClientVersionString'] = 'usbmux.py by marcan'
        if isinstance(req, int):
            # translate binary-protocol codes 2/3 into plist message names
            req = [self.TYPE_CONNECT, self.TYPE_LISTEN][req-2]
        payload['MessageType'] = req
        payload['ProgName'] = 'tcprelay'
        BinaryProtocol.sendpacket(self, self.TYPE_PLIST, tag, plistlib.writePlistToString(payload))
    def getpacket(self):
        resp, tag, payload = BinaryProtocol.getpacket(self)
        if resp != self.TYPE_PLIST:
            raise MuxError("Received non-plist type %d"%resp)
        payload = plistlib.readPlistFromString(payload)
        return payload['MessageType'], tag, payload
class MuxConnection(object):
    """One socket to the usbmuxd daemon, speaking `protoclass`.

    A connection either serves as a control channel (listen() + process()
    for hotplug events) or, after connect(), becomes a raw passthrough to
    a TCP port on the device.
    """
    def __init__(self, socketpath, protoclass):
        self.socketpath = socketpath
        if sys.platform in ['win32', 'cygwin']:
            # usbmuxd on Windows (from iTunes) listens on TCP, not a unix socket
            family = socket.AF_INET
            address = ('127.0.0.1', 27015)
        else:
            family = socket.AF_UNIX
            address = self.socketpath
        self.socket = SafeStreamSocket(address, family)
        self.proto = protoclass(self.socket)
        self.pkttag = 1     # monotonically increasing request tag
        self.devices = []   # live list of attached MuxDevice objects
    def _getreply(self):
        """Wait for a RESULT packet; any other type is a protocol error."""
        while True:
            resp, tag, data = self.proto.getpacket()
            if resp == self.proto.TYPE_RESULT:
                return tag, data
            else:
                raise MuxError("Invalid packet type received: %d"%resp)
    def _processpacket(self):
        """Handle one asynchronous device add/remove event."""
        resp, tag, data = self.proto.getpacket()
        if resp == self.proto.TYPE_DEVICE_ADD:
            self.devices.append(MuxDevice(data['DeviceID'], data['Properties']['ProductID'], data['Properties']['SerialNumber'], data['Properties']['LocationID']))
        elif resp == self.proto.TYPE_DEVICE_REMOVE:
            # BUG FIX: the original removed elements from self.devices while
            # iterating over it, which skips the entry following a removal.
            # Filter into the *same* list object (slice assignment) because
            # USBMux keeps a long-lived alias of this list.
            self.devices[:] = [dev for dev in self.devices
                               if dev.devid != data['DeviceID']]
        elif resp == self.proto.TYPE_RESULT:
            raise MuxError("Unexpected result: %d"%resp)
        else:
            raise MuxError("Invalid packet type received: %d"%resp)
    def _exchange(self, req, payload={}):
        """Send one request, await its reply, return the numeric result."""
        mytag = self.pkttag
        self.pkttag += 1
        self.proto.sendpacket(req, mytag, payload)
        recvtag, data = self._getreply()
        if recvtag != mytag:
            raise MuxError("Reply tag mismatch: expected %d, got %d"%(mytag, recvtag))
        return data['Number']
    def listen(self):
        """Subscribe this connection to device hotplug events."""
        ret = self._exchange(self.proto.TYPE_LISTEN)
        if ret != 0:
            raise MuxError("Listen failed: error %d"%ret)
    def process(self, timeout=None):
        """Wait up to `timeout` seconds for one event and handle it."""
        if self.proto.connected:
            raise MuxError("Socket is connected, cannot process listener events")
        rlo, wlo, xlo = select.select([self.socket.sock], [], [self.socket.sock], timeout)
        if xlo:
            self.socket.sock.close()
            raise MuxError("Exception in listener socket")
        if rlo:
            self._processpacket()
    def connect(self, device, port):
        """Open a tunnel to TCP `port` on `device`; returns the raw socket."""
        # the daemon expects the port in network byte order, hence the swap
        ret = self._exchange(self.proto.TYPE_CONNECT, {'DeviceID':device.devid, 'PortNumber':((port<<8) & 0xFF00) | (port>>8)})
        if ret != 0:
            raise MuxError("Connect failed: error %d"%ret)
        self.proto.connected = True
        return self.socket.sock
    def close(self):
        self.socket.sock.close()
class USBMux(object):
    """High-level usbmuxd client: tracks attached devices and opens
    TCP-over-USB connections to them."""
    def __init__(self, socketpath=None):
        if socketpath is None:
            # NOTE(review): both branches pick the same path, so the darwin
            # check is redundant as written; kept to preserve behavior.
            if sys.platform == 'darwin':
                socketpath = "/var/run/usbmuxd"
            else:
                socketpath = "/var/run/usbmuxd"
        self.socketpath = socketpath
        # Try the legacy binary protocol first; fall back to the plist
        # protocol when the daemon reports a version mismatch.
        self.listener = MuxConnection(socketpath, BinaryProtocol)
        try:
            self.listener.listen()
            self.version = 0
            self.protoclass = BinaryProtocol
        except MuxVersionError:
            self.listener = MuxConnection(socketpath, PlistProtocol)
            self.listener.listen()
            self.protoclass = PlistProtocol
            self.version = 1
        # Alias of the listener's live device list (updated by process()).
        self.devices = self.listener.devices
    def process(self, timeout=None):
        """Pump one round of hotplug events on the listener connection."""
        self.listener.process(timeout)
    def connect(self, device, port):
        """Open a fresh connection tunneled to `port` on `device`."""
        connector = MuxConnection(self.socketpath, self.protoclass)
        return connector.connect(device, port)
# Demo: connect to usbmuxd and print the device list as hotplug events arrive.
if __name__ == "__main__":
    mux = USBMux()
    print "Waiting for devices..."
    # One short poll so devices already attached at startup show up.
    if not mux.devices:
        mux.process(0.1)
    while True:
        print "Devices:"
        for dev in mux.devices:
            print dev
        mux.process()

View File

@@ -0,0 +1,123 @@
import glob
import plistlib
import os
from bplist import BPlistReader
import cPickle
import gzip
def read_file(filename):
    """Return the entire contents of `filename`, read in binary mode."""
    with open(filename, "rb") as fh:
        return fh.read()
def write_file(filename,data):
    """Write `data` to `filename` in binary mode, replacing old content."""
    with open(filename, "wb") as fh:
        fh.write(data)
def makedirs(dirs):
    """Create `dirs` (and missing parents), ignoring "already exists".

    Only OSError is swallowed now; the original bare `except:` also hid
    unrelated failures (typos, KeyboardInterrupt, ...).
    """
    try:
        os.makedirs(dirs)
    except OSError:
        pass
def getHomePath(foldername, filename):
    """Return $HOME/foldername/filename, creating the folder if missing."""
    folder = os.path.join(os.path.expanduser('~'), foldername)
    if not os.path.exists(folder):
        makedirs(folder)
    return os.path.join(folder, filename)
def readHomeFile(foldername, filename):
    """Return the contents of HOME/foldername/filename, or None if absent."""
    path = getHomePath(foldername, filename)
    return read_file(path) if os.path.exists(path) else None
def writeHomeFile(foldername, filename, data):
    """Write `data` to HOME/foldername/filename; returns the full path."""
    path = getHomePath(foldername, filename)
    write_file(path, data)
    return path
def readPlist(filename):
    """Parse a plist file, auto-detecting binary ("bplist") vs XML format."""
    with open(filename, "rb") as fh:
        header = fh.read(16)
    if header.startswith("bplist"):
        return BPlistReader.plistWithFile(filename)
    return plistlib.readPlist(filename)
def parsePlist(s):
    """Parse an in-memory plist string, binary or XML."""
    if s.startswith("bplist"):
        return BPlistReader.plistWithString(s)
    return plistlib.readPlistFromString(s)
#http://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size
def sizeof_fmt(num):
    """Format a byte count as a short human-readable string, e.g. "2KB".

    NOTE: falls off the end (returns None) for values >= 1024 TB.
    """
    for unit in ['bytes','KB','MB','GB','TB']:
        if num < 1024.0:
            return "%d%s" % (num, unit)
        num /= 1024.0
#http://www.5dollarwhitebox.org/drupal/node/84
def convert_bytes(bytes):
    """Format a byte count with two decimals and a one-letter unit suffix."""
    bytes = float(bytes)
    # thresholds are the binary (1024-based) unit boundaries
    for factor, suffix in ((1099511627776, 'T'),
                           (1073741824, 'G'),
                           (1048576, 'M'),
                           (1024, 'K')):
        if bytes >= factor:
            return '%.2f%s' % (bytes / factor, suffix)
    return '%.2fb' % bytes
def xor_strings(a,b):
    """XOR two byte strings character by character.

    Length is taken from `a`; raises IndexError if `b` is shorter.
    """
    return "".join(chr(ord(a[i]) ^ ord(b[i])) for i in range(len(a)))
# NOTE(review): these deliberately shadow the builtins hex() and ascii();
# kept as-is because hexdump() below depends on the names.
hex = lambda data: " ".join("%02X" % ord(i) for i in data)  # "AB" -> "41 42"
ascii = lambda data: "".join(c if 31 < ord(c) < 127 else "." for c in data)  # non-printables become "."
def hexdump(d):
    """Print a classic 16-bytes-per-line hex + ASCII dump of string `d`."""
    for i in xrange(0,len(d),16):
        data = d[i:i+16]
        # 47 = width of 16 hex byte pairs plus separating spaces
        print "%08X | %s | %s" % (i, hex(data).ljust(47), ascii(data))
def search_plist(directory, matchDict):
    """Return the first plist in `directory` whose top-level entries match
    every (key, value) pair in `matchDict`, or None when none matches.

    Unparseable files are skipped.  The `except` is narrowed from the
    original bare clause so ^C and genuine bugs are no longer swallowed,
    and the try now covers only the parse step.
    """
    for p in map(os.path.normpath, glob.glob(directory + "/*.plist")):
        try:
            d = plistlib.readPlist(p)
        except Exception:
            continue
        if all(d.get(k) == v for k, v in matchDict.items()):
            print("Using plist file %s" % p)
            return d
def save_pickle(filename,data):
    """Pickle `data` (highest protocol) into a gzip-compressed file."""
    with gzip.open(filename, "wb") as fh:
        cPickle.dump(data, fh, cPickle.HIGHEST_PROTOCOL)
def load_pickle(filename):
    """Load an object previously written by save_pickle()."""
    with gzip.open(filename, "rb") as fh:
        return cPickle.load(fh)

View File

@@ -0,0 +1,29 @@
def print_table(title, headers, rows):
    """Print `rows` as a boxed ASCII table with `headers` and a centered title.

    Column widths adapt to the widest cell (header included); an empty
    `rows` list prints a "No entries" placeholder.

    The original built widths with map(len, ...).append(...), which only
    works on Python 2 (py3 map objects have no append); the comprehension
    below is behavior-identical and portable.  Single-argument prints are
    parenthesized for the same reason.
    """
    widths = [max([len(str(row[i])) for row in rows] + [len(headers[i])])
              for i in range(len(headers))]
    width = sum(widths) + len(headers) + 1
    sep = "-" * width
    print(sep)
    print("|" + title.center(width - 2) + "|")
    print(sep)
    hline = "|"
    for i in range(len(headers)):
        hline += headers[i].ljust(widths[i]) + "|"
    print(hline)
    print(sep)
    for row in rows:
        line = "|"
        for i in range(len(row)):
            line += str(row[i]).ljust(widths[i]) + "|"
        print(line)
    if len(rows) == 0:
        print("|" + "No entries".center(width - 2) + "|")
    print(sep)
    print("")

View File

@@ -0,0 +1,139 @@
import os
import sys
from util import sizeof_fmt, hexdump
from progressbar import ProgressBar
from crypto.aes import AESdecryptCBC, AESencryptCBC
class FileBlockDevice(object):
    """Block-device abstraction over a plain file (e.g. a raw dd image).

    Reads are always allowed; writes are silently ignored unless opened
    with write=True (deliberate, for dry-run testing).
    """
    def __init__(self, filename, offset=0, write=False):
        flag = os.O_RDWR if write else os.O_RDONLY
        if sys.platform == 'win32':
            flag = flag | os.O_BINARY  # suppress newline translation on Windows
        self.filename = filename
        self.fd = os.open(filename, flag)
        self.offset = offset          # byte offset of block 0 within the file
        self.writeFlag = write
        self.size = os.path.getsize(filename)
        self.setBlockSize(8192)
    def setBlockSize(self, bs):
        """Set the logical block size and recompute the whole-block count."""
        self.blockSize = bs
        # Explicit floor division: identical on Python 2 ints and keeps the
        # count an int under Python 3 (plain / would produce a float).
        self.nBlocks = self.size // bs
    def readBlock(self, blockNum):
        """Return block `blockNum` (may be short at end of file)."""
        os.lseek(self.fd, self.offset + self.blockSize * blockNum, os.SEEK_SET)
        return os.read(self.fd, self.blockSize)
    def write(self, offset, data):
        """Write `data` at byte `offset` relative to self.offset.

        No-op (fails silently) when the device was opened read-only.
        """
        if self.writeFlag: #fail silently for testing
            os.lseek(self.fd, self.offset + offset, os.SEEK_SET)
            return os.write(self.fd, data)
    def writeBlock(self, lba, block):
        """Write one whole block at logical block address `lba`."""
        return self.write(lba*self.blockSize, block)
class FTLBlockDevice(object):
    """Read-only block device over the NAND FTL: blocks are fetched via
    nand.readLPN, optionally decrypted with `defaultKey`.

    `first_lba`/`last_lba` bound the logical partition being exposed.
    """
    def __init__(self, nand, first_lba, last_lba, defaultKey=None):
        self.nand = nand
        self.pageSize = nand.pageSize
        self.blockSize = 0 #not used
        self.key = defaultKey
        self.lbaoffset = first_lba
        self.last_lba = last_lba
        self.setBlockSize(self.pageSize)
    def setBlockSize(self, bs):
        """Precompute lba<->page scaling; bs may be smaller or larger
        than the underlying page size."""
        self.blockSize = bs
        self.lbasPerPage = self.pageSize / bs
        # float factor so sub-page block sizes map into the right page
        self.lbaToLpnFactor = bs / (self.pageSize+0.0)
        self.pagesPerLBA = bs / self.pageSize
        if bs > self.pageSize:
            pass#raise Exception("FTLBlockDevice lba-size > pageSize not handled")
    def readBlock(self, blockNum):
        """Read one logical block, assembling from pages as needed."""
        #if (self.lbaoffset + blockNum / self.lbasPerPage) > self.last_lba:
        #    print "readBlock past last lba", blockNum
        #    print "readBlock past last lba", blockNum
        #    return "\x00" * self.blockSize
        lpn = int(self.lbaoffset + blockNum * self.lbaToLpnFactor)
        d = self.nand.readLPN(lpn, self.key)
        # block larger than a page: concatenate the following pages
        for i in xrange(1, self.pagesPerLBA):
            d += self.nand.readLPN(lpn + i, self.key)
        if self.lbasPerPage:
            # block smaller than a page: slice out the right chunk
            zz = blockNum % self.lbasPerPage
            return d[zz*self.blockSize:(zz+1)*self.blockSize]
        return d
    def write(self, offset, data):
        raise Exception("FTLBlockDevice write method not implemented")
    def writeBlock(self, lba, block):
        raise Exception("FTLBlockDevice writeBlock method not implemented")
    def dumpToFile(self, outputfilename):
        """Dump the whole partition (first_lba..last_lba) to a file,
        sanity-checking for an HFS volume header on the first page."""
        hs = sizeof_fmt((self.last_lba - self.lbaoffset) * self.pageSize)
        print "Dumping partition to %s (%s)" % (outputfilename, hs)
        flags = os.O_CREAT | os.O_RDWR
        if sys.platform == "win32":
            flags |= os.O_BINARY
        fd=os.open(outputfilename, flags)
        pbar = ProgressBar(self.last_lba - self.lbaoffset - 1)
        pbar.start()
        for i in xrange(self.lbaoffset, self.last_lba):
            pbar.update(i-self.lbaoffset)
            d = self.nand.readLPN(i, self.key)
            # 0x400 is the HFS+/HFSX volume header offset; "HX" = HFSX
            if i == self.lbaoffset and d[0x400:0x402] != "HX":
                print "FAIL? Not HFS partition or wrong key"
            os.write(fd, d)
        pbar.finish()
        os.close(fd)
class IMG3BlockDevice(object):
    """Block device over the DATA section of an IMG3 container, decrypting
    AES-CBC per block.  The IV of block N is the last 16 ciphertext bytes
    of block N-1 (CBC chaining), so random access stays possible.
    """
    def __init__(self, filename, key, iv, write=False):
        flag = os.O_RDONLY if not write else os.O_RDWR
        if sys.platform == 'win32':
            flag = flag | os.O_BINARY
        self.filename = filename
        self.fd = os.open(filename, flag)
        self.writeFlag = write
        d = os.read(self.fd, 8192)
        # "3gmI" is the little-endian on-disk magic for "Img3"
        if d[:4] != "3gmI":
            raise Exception("IMG3BlockDevice bad magic %s" % d[:4])
        # "ATAD" = little-endian "DATA" tag, expected right after the header
        if d[0x34:0x38] != "ATAD":
            raise Exception("Fu")
        self.encrypted = True
        self.key = key
        self.iv0 = iv              # IV for the very first block
        self.offset = 0x40         # start of the DATA payload
        self.size = os.path.getsize(filename)
        self.setBlockSize(8192)
    def setBlockSize(self, bs):
        self.blockSize = bs
        self.nBlocks = self.size / bs
        # per-block IV cache; block 0 uses the IV from the IMG3 keybag
        self.ivs = {0: self.iv0}
    def getIVforBlock(self, blockNum):
        #read last 16 bytes of previous block to get IV
        if not self.ivs.has_key(blockNum):
            os.lseek(self.fd, self.offset + self.blockSize * blockNum - 16, os.SEEK_SET)
            self.ivs[blockNum] = os.read(self.fd, 16)
        return self.ivs[blockNum]
    def readBlock(self, blockNum):
        """Read and (if enabled) AES-CBC-decrypt one block."""
        os.lseek(self.fd, self.offset + self.blockSize * blockNum, os.SEEK_SET)
        data = os.read(self.fd, self.blockSize)
        if self.encrypted:
            data = AESdecryptCBC(data, self.key, self.getIVforBlock(blockNum))
        return data
    def _write(self, offset, data):
        if self.writeFlag: #fail silently for testing
            os.lseek(self.fd, self.offset + offset, os.SEEK_SET)
            return os.write(self.fd, data)
    def writeBlock(self, lba, data):
        """Encrypt (if enabled) and write one block at `lba`."""
        if self.encrypted:
            data = AESencryptCBC(data, self.key, self.getIVforBlock(lba))
        return self._write(lba*self.blockSize, data)

View File

@@ -0,0 +1,251 @@
"""
http://github.com/farcaller/bplist-python/blob/master/bplist.py
"""
import struct
import plistlib
from datetime import datetime, timedelta
class BPListWriter(object):
    """Skeleton binary-plist writer; serialization is not implemented yet."""
    def __init__(self, objects):
        self.bplist = ""
        self.objects = objects
    def binary(self):
        '''binary -> string
        Generates bplist
        '''
        # NOTE(review): the result is stored in self.data but write() checks
        # self.bplist -- inconsistent; will matter once the TODOs are done.
        self.data = 'bplist00'
        # TODO: flatten objects and count max length size
        # TODO: write objects and save offsets
        # TODO: write offsets
        # TODO: write metadata
        return self.data
    def write(self, filename):
        '''
        Writes bplist to file
        '''
        if self.bplist != "":
            pass
            # TODO: save self.bplist to file
        else:
            raise Exception('BPlist not yet generated')
class BPlistReader(object):
    """Parser for Apple binary property lists ("bplist00").

    Python 2 implementation: `s` is a byte string; parse() returns the
    root object converted to native Python types.
    """
    def __init__(self, s):
        self.data = s
        self.objects = []    # flat object table, in offset-table order
        self.resolved = {}   # memo table for __resolveObject
    def __unpackIntStruct(self, sz, s):
        '''__unpackIntStruct(size, string) -> int
        Unpacks the big-endian integer of given size (1, 2, 4 or 8 bytes) from string
        '''
        if sz == 1:
            ot = '!B'
        elif sz == 2:
            ot = '!H'
        elif sz == 4:
            ot = '!I'
        elif sz == 8:
            ot = '!Q'
        else:
            raise Exception('int unpack size '+str(sz)+' unsupported')
        return struct.unpack(ot, s)[0]
    def __unpackInt(self, offset):
        '''__unpackInt(offset) -> int
        Unpacks int field from plist at given offset
        '''
        return self.__unpackIntMeta(offset)[1]
    def __unpackIntMeta(self, offset):
        '''__unpackIntMeta(offset) -> (size, int)
        Unpacks int field from plist at given offset and returns its size and value
        '''
        obj_header = struct.unpack('!B', self.data[offset])[0]
        obj_type, obj_info = (obj_header & 0xF0), (obj_header & 0x0F)
        int_sz = 2**obj_info  # byte size stored as a power of two
        return int_sz, self.__unpackIntStruct(int_sz, self.data[offset+1:offset+1+int_sz])
    def __resolveIntSize(self, obj_info, offset):
        '''__resolveIntSize(obj_info, offset) -> (count, offset)
        Calculates count of objref* array entries and returns count and offset to first element
        '''
        if obj_info == 0x0F:
            # count didn't fit in the 4-bit field; an int object follows
            ofs, obj_count = self.__unpackIntMeta(offset+1)
            objref = offset+2+ofs
        else:
            obj_count = obj_info
            objref = offset+1
        return obj_count, objref
    def __unpackFloatStruct(self, sz, s):
        '''__unpackFloatStruct(size, string) -> float
        Unpacks the float of given size (4 or 8 bytes) from string
        '''
        if sz == 4:
            ot = '!f'
        elif sz == 8:
            ot = '!d'
        else:
            raise Exception('float unpack size '+str(sz)+' unsupported')
        return struct.unpack(ot, s)[0]
    def __unpackFloat(self, offset):
        '''__unpackFloat(offset) -> float
        Unpacks float field from plist at given offset
        '''
        obj_header = struct.unpack('!B', self.data[offset])[0]
        obj_type, obj_info = (obj_header & 0xF0), (obj_header & 0x0F)
        int_sz = 2**obj_info
        # BUG FIX: this used to return the (size, value) tuple (copy/paste
        # from __unpackIntMeta), so every "real" in a plist surfaced as a
        # tuple.  The docstring and the 0x20 case of __unpackItem both
        # expect just the float.
        return self.__unpackFloatStruct(int_sz, self.data[offset+1:offset+1+int_sz])
    def __unpackDate(self, offset):
        # CFDate: big-endian double, seconds since 2001-01-01 (Apple epoch)
        td = int(struct.unpack(">d", self.data[offset+1:offset+9])[0])
        return datetime(year=2001,month=1,day=1) + timedelta(seconds=td)
    def __unpackItem(self, offset):
        '''__unpackItem(offset)
        Unpacks and returns an item from plist.  Containers (array, dict)
        are returned with raw integer object references; __resolveObject
        turns those into real objects.
        '''
        obj_header = struct.unpack('!B', self.data[offset])[0]
        obj_type, obj_info = (obj_header & 0xF0), (obj_header & 0x0F)
        if obj_type == 0x00:
            if obj_info == 0x00: # null   0000 0000
            	return None
            elif obj_info == 0x08: # bool 0000 1000 // false
                return False
            elif obj_info == 0x09: # bool 0000 1001 // true
                return True
            elif obj_info == 0x0F: # fill 0000 1111 // fill byte
                raise Exception("0x0F Not Implemented") # this is really pad byte, FIXME
            else:
                raise Exception('unpack item type '+str(obj_header)+' at '+str(offset)+ 'failed')
        elif obj_type == 0x10: # int     0001 nnnn ... // # of bytes is 2^nnnn, big-endian bytes
            return self.__unpackInt(offset)
        elif obj_type == 0x20: # real    0010 nnnn ... // # of bytes is 2^nnnn, big-endian bytes
            return self.__unpackFloat(offset)
        elif obj_type == 0x30: # date    0011 0011 ... // 8 byte float follows, big-endian bytes
            return self.__unpackDate(offset)
        elif obj_type == 0x40: # data    0100 nnnn [int] ... // nnnn is number of bytes unless 1111 then int count follows, followed by bytes
            obj_count, objref = self.__resolveIntSize(obj_info, offset)
            return plistlib.Data(self.data[objref:objref+obj_count]) # XXX: we return data as str
        elif obj_type == 0x50: # string  0101 nnnn [int] ... // ASCII string, nnnn is # of chars, else 1111 then int count, then bytes
            obj_count, objref = self.__resolveIntSize(obj_info, offset)
            return self.data[objref:objref+obj_count]
        elif obj_type == 0x60: # string  0110 nnnn [int] ... // Unicode string, nnnn is # of chars, else 1111 then int count, then big-endian 2-byte uint16_t
            obj_count, objref = self.__resolveIntSize(obj_info, offset)
            return self.data[objref:objref+obj_count*2].decode('utf-16be')
        elif obj_type == 0x80: # uid     1000 nnnn ... // nnnn+1 is # of bytes
            # FIXME: Accept as a string for now
            # NOTE(review): the format says nnnn+1 bytes but __resolveIntSize
            # does not add 1 -- confirm against CFBinaryPList before relying
            # on UID values.
            obj_count, objref = self.__resolveIntSize(obj_info, offset)
            return plistlib.Data(self.data[objref:objref+obj_count])
        elif obj_type == 0xA0: # array   1010 nnnn [int] objref* // nnnn is count, unless '1111', then int count follows
            obj_count, objref = self.__resolveIntSize(obj_info, offset)
            arr = []
            for i in range(obj_count):
                arr.append(self.__unpackIntStruct(self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
            return arr
        elif obj_type == 0xC0: # set     1100 nnnn [int] objref* // nnnn is count, unless '1111', then int count follows
            # XXX: not serializable via apple implementation
            raise Exception("0xC0 Not Implemented") # FIXME: implement
        elif obj_type == 0xD0: # dict    1101 nnnn [int] keyref* objref* // nnnn is count, unless '1111', then int count follows
            obj_count, objref = self.__resolveIntSize(obj_info, offset)
            keys = []
            for i in range(obj_count):
                keys.append(self.__unpackIntStruct(self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
            values = []
            objref += obj_count*self.object_ref_size
            for i in range(obj_count):
                values.append(self.__unpackIntStruct(self.object_ref_size, self.data[objref+i*self.object_ref_size:objref+i*self.object_ref_size+self.object_ref_size]))
            dic = {}
            for i in range(obj_count):
                dic[keys[i]] = values[i]
            return dic
        else:
            raise Exception('don\'t know how to unpack obj type '+hex(obj_type)+' at '+str(offset))
    def __resolveObject(self, idx):
        """Recursively replace integer object refs with real objects."""
        try:
            return self.resolved[idx]
        except KeyError:
            obj = self.objects[idx]
            if type(obj) == list:
                newArr = []
                for i in obj:
                    newArr.append(self.__resolveObject(i))
                self.resolved[idx] = newArr
                return newArr
            if type(obj) == dict:
                newDic = {}
                for k,v in obj.iteritems():
                    rk = self.__resolveObject(k)
                    rv = self.__resolveObject(v)
                    newDic[rk] = rv
                self.resolved[idx] = newDic
                return newDic
            else:
                self.resolved[idx] = obj
                return obj
    def parse(self):
        """Parse self.data and return the fully-resolved root object."""
        # read header
        if self.data[:8] != 'bplist00':
            raise Exception('Bad magic')
        # read trailer (fixed 32-byte structure at end of file)
        self.offset_size, self.object_ref_size, self.number_of_objects, self.top_object, self.table_offset = struct.unpack('!6xBB4xI4xI4xI', self.data[-32:])
        #print "** plist offset_size:",self.offset_size,"objref_size:",self.object_ref_size,"num_objs:",self.number_of_objects,"top:",self.top_object,"table_ofs:",self.table_offset
        # read offset table
        self.offset_table = self.data[self.table_offset:-32]
        self.offsets = []
        ot = self.offset_table
        for i in xrange(self.number_of_objects):
            offset_entry = ot[:self.offset_size]
            ot = ot[self.offset_size:]
            self.offsets.append(self.__unpackIntStruct(self.offset_size, offset_entry))
        #print "** plist offsets:",self.offsets
        # read object table
        self.objects = []
        k = 0
        for i in self.offsets:
            obj = self.__unpackItem(i)
            #print "** plist unpacked",k,type(obj),obj,"at",i
            k += 1
            self.objects.append(obj)
        # rebuild object tree starting from the root
        return self.__resolveObject(self.top_object)
    @classmethod
    def plistWithString(cls, s):
        """Parse a binary plist held in a string."""
        parser = cls(s)
        return parser.parse()
    @classmethod
    def plistWithFile(cls, f):
        """Parse a binary plist file given its path."""
        file = open(f,"rb")
        parser = cls(file.read())
        file.close()
        return parser.parse()

View File

@@ -0,0 +1,71 @@
from keystore.keybag import Keybag
from keystore.effaceable import EffaceableLockers
from util.ramdiskclient import RamdiskToolClient
import plistlib
# Labels for the passcodeKeyboardComplexity value stored in
# UserSettings.plist (0 = simple 4-digit PIN).
COMPLEXITY={
    0: "4 digits",
    1: "n digits",
    2: "n alphanum"
}
def checkPasscodeComplexity(data_volume):
    """Return the passcodeKeyboardComplexity recorded on the data partition.

    See COMPLEXITY for the meaning of the values.  Defaults to 0 (simple
    4-digit passcode) when UserSettings.plist cannot be read.
    """
    pl = data_volume.readFile("/mobile/Library/ConfigurationProfiles/UserSettings.plist", returnString=True)
    if not pl:
        print("Failed to read UserSettings.plist, assuming simple passcode")
        return 0
    pl = plistlib.readPlistFromString(pl)
    value = pl["restrictedValue"]["passcodeKeyboardComplexity"]["value"]
    print("passcodeKeyboardComplexity %d => %s" % (value, COMPLEXITY.get(value)))
    # Return the value already extracted above instead of walking the
    # nested dict a second time (the original re-read it redundantly).
    return value
def loadKeybagFromVolume(volume, device_infos):
    """Load and (partially) unlock the system keybag from the data partition.

    Returns a Keybag on success, False when systembag.kb is missing or not
    a binary plist.  If a passcode key is already known it is applied.
    """
    systembag = volume.readFile("/keybags/systembag.kb", returnString=True)
    if not systembag or not systembag.startswith("bplist"):
        print "FAIL: could not read /keybags/systembag.kb from data partition"
        return False
    # The BAG1 locker in the effaceable storage area holds the key that
    # protects systembag.kb; its last 32 bytes are the actual key material.
    lockers = EffaceableLockers(device_infos["lockers"].data)
    bag1key = lockers.get("BAG1")[-32:]
    keybag = Keybag.createWithSystemkbfile(systembag, bag1key, device_infos.get("key835", "").decode("hex"))
    keybag.setDKey(device_infos)
    if device_infos.has_key("passcodeKey"):
        keybag.unlockWithPasscodeKey(device_infos.get("passcodeKey").decode("hex"))
    return keybag
def bruteforcePasscode(device_infos, data_volume):
    """Recover the device passcode via the connected ramdisk tool.

    Prompts for a passcode first; falls back to bruteforce for simple
    (4-digit) passcodes.  On success, merges the passcode key and clear
    class keys into device_infos and returns True.

    Single-argument prints are parenthesized (identical output on
    Python 2) and the user-facing typo "comlexity" is fixed.
    """
    if "passcode" in device_infos:
        print("Passcode already found, no bruteforce required")
        return False
    kb = data_volume.keybag
    if not kb:
        return False
    rd = RamdiskToolClient.get()
    if rd.device_infos.udid != device_infos.udid:
        print("Wrong device connected")
        return
    print("Passcode complexity (from OpaqueStuff) : %s" % COMPLEXITY.get(kb.passcodeComplexity))
    print("Enter passcode or leave blank for bruteforce:")
    z = raw_input()
    bf = rd.getPasscodeKey(kb.KeyBagKeys, z)
    if kb.unlockWithPasscodeKey(bf.get("passcodeKey").decode("hex")):
        print("Passcode \"%s\" OK" % z)
    else:
        if z != "":
            print("Wrong passcode, trying to bruteforce !")
        if kb.passcodeComplexity != 0:
            # only 4-digit passcodes are worth bruteforcing on-device
            print("Complex passcode used, not bruteforcing")
            return False
        bf = rd.bruteforceKeyBag(kb.KeyBagKeys)
        if bf and kb.unlockWithPasscodeKey(bf.get("passcodeKey").decode("hex")):
            print("Bruteforce successful, passcode : %s" % bf["passcode"])
            print("Passcode key : %s" % bf.get("passcodeKey"))
    if kb.unlocked:
        device_infos.update(bf)
        device_infos["classKeys"] = kb.getClearClassKeysDict()
        device_infos["KeyBagKeys"] = plistlib.Data(kb.KeyBagKeys)
        return True
    return False

View File

@@ -0,0 +1,16 @@
import base64
def chunks(l, n):
    """Yield successive n-sized slices of `l`; the last may be shorter."""
    return (l[start:start + n] for start in range(0, len(l), n))
def RSA_KEY_DER_to_PEM(data):
    """Wrap a DER-encoded RSA private key in a PEM envelope."""
    body = list(chunks(base64.b64encode(data), 64))
    return "\n".join(["-----BEGIN RSA PRIVATE KEY-----"]
                     + body
                     + ["-----END RSA PRIVATE KEY-----"])
def CERT_DER_to_PEM(data):
    """Wrap a DER-encoded X.509 certificate in a PEM envelope."""
    body = list(chunks(base64.b64encode(data), 64))
    return "\n".join(["-----BEGIN CERTIFICATE-----"]
                     + body
                     + ["-----END CERTIFICATE-----"])

View File

@@ -0,0 +1,71 @@
"""
/**************************************************************
LZSS.C -- A Data Compression Program
***************************************************************
4/6/1989 Haruhiko Okumura
Use, distribute, and modify this program freely.
Please send me your improved versions.
PC-VAN SCIENCE
NIFTY-Serve PAF01022
CompuServe 74050,1022
**************************************************************/
/*
* lzss.c - Package for decompressing lzss compressed objects
*
* Copyright (c) 2003 Apple Computer, Inc.
*
* DRI: Josh de Cesare
*/
"""
from array import array
import struct
# LZSS parameters -- must match the compressor used for kernelcaches:
N = 4096        # ring-buffer size
F = 18          # upper limit of a match length
THRESHOLD = 2   # matches longer than this are encoded as (offset, length)
NIL = N         # sentinel index from the reference C code (unused here)
def decompress_lzss(str):
    """Decompress an Apple "complzss"-wrapped LZSS stream (kernelcaches).

    Returns the decompressed byte string, or None when the magic is
    missing.  NOTE(review): the parameter shadows the builtin `str`;
    kept for interface compatibility.
    """
    if str[:8] !="complzss":
        print "decompress_lzss: complzss magic missing"
        return
    # big-endian uncompressed size at offset 12 of the header
    decompsize = struct.unpack(">L", str[12:16])[0]
    # ring buffer initialized to spaces, as in the reference implementation
    text_buf = array("B", " "*(N + F - 1))
    src = array("B", str[0x180:])   # compressed payload starts at 0x180
    srclen = len(src)
    dst = array("B", " "*decompsize)
    r = N - F
    srcidx, dstidx, flags, c = 0, 0, 0, 0
    while True:
        # `flags` holds 8 literal/match bits plus a 0xFF00 marker
        flags >>= 1
        if ((flags & 0x100) == 0):
            if (srcidx >= srclen):
                break
            c = src[srcidx]; srcidx += 1
            flags = c | 0xFF00;
        if (flags & 1):
            # flag bit set: literal byte, copied through and into the ring
            if (srcidx >= srclen):
                break
            c = src[srcidx]; srcidx += 1
            dst[dstidx] = c; dstidx += 1
            text_buf[r] = c; r += 1
            r &= (N - 1);
        else:
            # flag bit clear: (offset, length) back-reference into the ring
            if (srcidx >= srclen):
                break
            i = src[srcidx]; srcidx += 1
            if (srcidx >= srclen):
                break
            j = src[srcidx]; srcidx += 1
            i |= ((j & 0xF0) << 4)          # 12-bit ring offset
            j = (j & 0x0F) + THRESHOLD      # length - 1
            for k in xrange(j+1):
                c = text_buf[(i + k) & (N - 1)]
                dst[dstidx] = c; dstidx += 1
                text_buf[r] = c; r += 1
                r &= (N - 1)
    return dst.tostring()

View File

@@ -0,0 +1,172 @@
import plistlib
import struct
import socket
from datetime import datetime
from progressbar import ProgressBar, Percentage, Bar, SimpleProgress, ETA
from usbmux import usbmux
from util import sizeof_fmt
# IOAESAccelerator operation modes and key selectors, passed through to the
# kernel by ramdisk_tool.  The masks select the hardware GID (shared per
# SoC family) and UID (unique per device) keys -- presumably 0x3E8/0x7D0 =
# 1000/2000 decimal as used by the kernel driver; confirm against the tool.
kIOAESAcceleratorEncrypt = 0
kIOAESAcceleratorDecrypt = 1
kIOAESAcceleratorGIDMask = 0x3E8
kIOAESAcceleratorUIDMask = 0x7D0
class DeviceInfo(dict):
    """dict of per-device facts (UDID, keys, ...), persisted as
    <dataVolumeUUID>.plist in the current directory."""
    @staticmethod
    def create(dict):
        # NOTE: the parameter name shadows the builtin; kept for interface
        # compatibility with existing callers.
        try:
            # `in` replaces the Python-2-only dict.has_key + assert combo
            # (identical behavior, and no longer disabled under -O).
            if "dataVolumeUUID" not in dict:
                return DeviceInfo(dict)
            filename = "%s.plist" % dict.get("dataVolumeUUID")
            return DeviceInfo(plistlib.readPlist(filename))
        except Exception:
            # Narrowed from a bare except: missing/unreadable cache plist
            # falls back to the supplied dict, but ^C is no longer eaten.
            return DeviceInfo(dict)
    def save(self):
        """Persist this info dict next to the current directory."""
        filename = "%s.plist" % self.get("dataVolumeUUID", "unk")
        plistlib.writePlist(self, filename)
    # A __del__ that auto-saved was removed upstream ("stop doing magic
    # stuff"); call save() explicitly instead.
class RamdiskToolClient(object):
    """Client for the ramdisk_tool service on a device booted from the
    custom ramdisk.  Protocol: length-prefixed XML plists over a
    usbmux-tunneled TCP connection (port 1999)."""
    instance = None   # process-wide singleton, see get()
    @staticmethod
    def get():
        """Return the shared client, connecting on first use."""
        if not RamdiskToolClient.instance:
            RamdiskToolClient.instance = RamdiskToolClient()
        return RamdiskToolClient.instance
    def __init__(self, udid=None, host="localhost", port=1999):
        self.host = host
        self.port = port
        self.device_infos = {}
        self.s = None
        self.connect(udid)
        self.getDeviceInfos()
    def close(self):
        if self.s:
            self.s.close()
            self.s = None
    def connect(self, udid=None):
        """Wait for an iOS device on USB and open the service port.

        NOTE(review): `udid` is currently ignored -- the first device
        found is used.
        """
        mux = usbmux.USBMux()
        mux.process(1.0)
        if not mux.devices:
            print "Waiting for iOS device"
            while not mux.devices:
                mux.process(1.0)
        if not mux.devices:
            print "No device found"
            return
        dev = mux.devices[0]
        print "Connecting to device : " + dev.serial
        try:
            self.s = mux.connect(dev, self.port)
        except:
            raise Exception("Connexion to device port %d failed" % self.port)
    def getDeviceInfos(self):
        """Query device info and merge in the UID-derived key blobs."""
        self.device_infos = self.send_req({"Request":"DeviceInfo"})
        keys = self.grabDeviceKeys()
        if keys:
            self.device_infos.update(keys)
        return DeviceInfo.create(self.device_infos)
    def downloadFile(self, path):
        """Fetch a file from the device; returns its bytes or None."""
        res = self.send_req({"Request": "DownloadFile",
                             "Path": path})
        if type(res) == plistlib._InternalDict and res.has_key("Data"):
            return res["Data"].data
    def getSystemKeyBag(self):
        return self.send_req({"Request":"GetSystemKeyBag"})
    def bruteforceKeyBag(self, KeyBagKeys):
        """Ask the device to bruteforce the (4-digit) passcode."""
        return self.send_req({"Request":"BruteforceSystemKeyBag",
                              "KeyBagKeys": plistlib.Data(KeyBagKeys)})
    def getEscrowRecord(self, hostID):
        return self.send_req({"Request":"GetEscrowRecord",
                              "HostID": hostID})
    def getPasscodeKey(self, keybagkeys, passcode):
        """Derive the passcode key for a candidate passcode."""
        return self.send_req({"Request":"KeyBagGetPasscodeKey",
                              "KeyBagKeys": plistlib.Data(keybagkeys),
                              "passcode": passcode})
    def send_msg(self, dict):
        # Frame: 4-byte little-endian length prefix + XML plist body.
        plist = plistlib.writePlistToString(dict)
        data = struct.pack("<L",len(plist)) + plist
        return self.s.send(data)
    def recv_msg(self):
        """Read one framed plist message; None on short read / EOF."""
        try:
            l = self.s.recv(4)
            if len(l) != 4:
                return None
            ll = struct.unpack("<L",l)[0]
            data = ""
            l = 0
            while l < ll:
                x = self.s.recv(ll-l)
                if not x:
                    return None
                data += x
                l += len(x)
            return plistlib.readPlistFromString(data)
        except:
            raise
            # NOTE(review): unreachable after the re-raise above; kept as-is.
            return None
    def send_req(self, dict):
        """Send one request, pumping Progress messages (with a progress
        bar) until the final reply arrives; returns the reply."""
        start = None
        self.send_msg(dict)
        while True:
            r = self.recv_msg()
            if type(r) == plistlib._InternalDict and r.get("MessageType") == "Progress":
                if not start:
                    pbar = ProgressBar(r.get("Total",100),[SimpleProgress(), " ", ETA(), "\n", Percentage(), " ", Bar()])
                    pbar.start()
                    start = datetime.utcnow()
                pbar.update( r.get("Progress", 0))
            else:
                if start:
                    pbar.finish()
                    print dict.get("Request"), ":", datetime.utcnow() - start
                return r
    def aesUID(self, data):
        """AES-encrypt `data` with the device-unique UID key."""
        return self.aes(data, kIOAESAcceleratorUIDMask, kIOAESAcceleratorEncrypt)
    def aesGID(self, data):
        """AES-decrypt `data` with the platform GID key."""
        return self.aes(data, kIOAESAcceleratorGIDMask, kIOAESAcceleratorDecrypt)
    def aes(self, data, keyMask, mode):
        return self.send_req({"Request":"AES",
                              "input": plistlib.Data(data),
                              "keyMask": keyMask,
                              "mode": mode,
                              "bits": 128
                              })
    def grabDeviceKeys(self):
        """Derive the well-known key835/899/89A/89B values by encrypting
        their fixed plaintexts with the UID key; returns hex strings or
        None on failure."""
        blobs = {"key835": "\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01",
                 "key899": "\xD1\xE8\xFC\xB5\x39\x37\xBF\x8D\xEF\xC7\x4C\xD1\xD0\xF1\xD4\xB0",
                 "key89A": "\xDB\x1F\x5B\x33\x60\x6C\x5F\x1C\x19\x34\xAA\x66\x58\x9C\x06\x61",
                 "key89B": "\x18\x3E\x99\x67\x6B\xB0\x3C\x54\x6F\xA4\x68\xF5\x1C\x0C\xBD\x49"
                 }
        for k,b in blobs.items():
            r = self.aesUID(b)
            if not r or r.returnCode != 0 or not r.has_key("data"):
                print "AES UID error"
                return
            blobs[k] = r.data.data.encode("hex")
        return blobs
    def reboot(self):
        print "Rebooting device"
        return self.send_req({"Request":"Reboot"})

View File

@@ -0,0 +1,19 @@
import struct
def tlvToDict(blob):
    """Decode a TLV blob into a {tag: data} dict.

    Duplicate tags keep the last value seen, matching insertion order of
    loopTLVBlocks.
    """
    return dict(loopTLVBlocks(blob))
def tlvToList(blob):
    """Decode a TLV blob into an ordered list of (tag, data) pairs."""
    pairs = []
    for tag, data in loopTLVBlocks(blob):
        pairs.append((tag, data))
    return pairs
def loopTLVBlocks(blob):
    """Yield (tag, data) pairs from a TLV blob.

    Each block is a 4-byte tag, a 4-byte big-endian length, then that many
    bytes of data.  A trailing fragment shorter than a full 8-byte header
    is silently ignored.
    """
    total = len(blob)
    offset = 0
    while offset + 8 <= total:
        tag = blob[offset:offset + 4]
        (length,) = struct.unpack(">L", blob[offset + 4:offset + 8])
        yield (tag, blob[offset + 8:offset + 8 + length])
        offset += 8 + length

View File

@@ -0,0 +1,12 @@
#HAX: carve the firmware-keys plist out of the redsn0w binary.
# redsn0w ships an XML plist of IV/key material compiled into the
# executable; anchor on a known key, back up to the preceding "<?xml"
# header, forward to the closing "</plist>", and save it as Keys.plist.
with open("redsn0w_win_0.9.9b4/redsn0w.exe", "rb") as f:
    d = f.read()
anchor = d.find("<key>IV</key>")
# Fail fast here: the original only asserted after rfind(), so a missing
# anchor (-1) made rfind scan d[0:-1] and could succeed spuriously.
assert anchor != -1
i = d.rfind("<?xml", 0, anchor)
j = d.find("</plist>", i)
assert i != -1
assert j != -1
with open("Keys.plist", "wb") as out:
    out.write(d[i:j + 8])  # +8 keeps the "</plist>" terminator itself

View File

@@ -0,0 +1,48 @@
12345
123456
123456
1234567
12345678
123456789
1234567890
09876
098765
0987654
09876543
098765432
0987654321
abcd
abcd1
abcde
abcde1
abcdef
abcdef1
abcdefg
abcdefg1
qwer
qwer1
qwert
qwert1
qwertz
qwertz1
asdf
asdfg
asdfg1
asdfgh
asdfgh1
yxcv
yxcv1
yxcvb
yxcvb1
yxcvbn
yxcvbn1
yxcvbnm
yxcvbnm1
mnbv
mnbv1
mnbvc
mnbvc1
mnbvcx
mnbvcx1
mnbvcxy
mnbvcxy1