Mirror of https://github.com/NationalSecurityAgency/ghidra.git (synced 2024-11-24 21:21:56 +00:00)

Merge remote-tracking branch 'origin/GP-3860_ryanmkurtz_dyld-stubs-data'

Commit 71b5dbe82d
@@ -23,6 +23,7 @@ import ghidra.app.util.bin.*;
import ghidra.app.util.bin.format.macho.commands.*;
import ghidra.app.util.opinion.DyldCacheUtils.SplitDyldCache;
import ghidra.program.model.data.*;
import ghidra.util.DataConverter;
import ghidra.util.exception.DuplicateNameException;

/**
@@ -382,6 +383,44 @@ public class MachHeader implements StructConverter {
        return getDescription();
    }

    /**
     * Creates a new Mach Header byte array
     *
     * @param magic The magic
     * @param cpuType The cpu type
     * @param cpuSubType The cpu subtype
     * @param fileType The file type
     * @param nCmds The number of commands
     * @param sizeOfCmds The size of the commands
     * @param flags The flags
     * @param reserved A reserved value (ignored for 32-bit magic)
     * @return The new header in byte array form
     * @throws MachException if an invalid magic value was passed in (see {@link MachConstants})
     */
    public static byte[] create(int magic, int cpuType, int cpuSubType, int fileType, int nCmds,
            int sizeOfCmds, int flags, int reserved) throws MachException {
        if (!MachConstants.isMagic(magic)) {
            throw new MachException("Invalid magic: 0x%x".formatted(magic));
        }

        DataConverter conv = DataConverter.getInstance(magic == MachConstants.MH_MAGIC);
        boolean is64bit = magic == MachConstants.MH_CIGAM_64 || magic == MachConstants.MH_MAGIC_64;

        byte[] bytes = new byte[is64bit ? 0x20 : 0x1c];
        conv.putInt(bytes, 0x00, magic);
        conv.putInt(bytes, 0x04, cpuType);
        conv.putInt(bytes, 0x08, cpuSubType);
        conv.putInt(bytes, 0x0c, fileType);
        conv.putInt(bytes, 0x10, nCmds);
        conv.putInt(bytes, 0x14, sizeOfCmds);
        conv.putInt(bytes, 0x18, flags);
        if (is64bit) {
            conv.putInt(bytes, 0x1c, reserved);
        }

        return bytes;
    }

    private static int readMagic(ByteProvider provider, long machHeaderStartIndexInProvider)
            throws IOException {
        BinaryReader br = new BinaryReader(provider, false);
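For orientation, here is a minimal, hypothetical use of the new MachHeader.create() factory. The demo class name and the cpu type/subtype, file type, and flag values are placeholders (the cpu values are the ones used later in this change); only the magic has to be a valid MachConstants value:

import ghidra.app.util.bin.format.macho.MachConstants;
import ghidra.app.util.bin.format.macho.MachException;
import ghidra.app.util.bin.format.macho.MachHeader;

public class MachHeaderCreateDemo {
    public static void main(String[] args) throws MachException {
        // A 64-bit magic produces a 0x20-byte header; the reserved field lands at offset 0x1c.
        byte[] header = MachHeader.create(
            MachConstants.MH_MAGIC_64, // magic
            0x100000c,                 // cpu type (arm64)
            0x80000002,                // cpu subtype
            6,                         // file type (MH_DYLIB)
            0,                         // number of load commands (none yet)
            0,                         // size of load commands
            0,                         // flags
            0);                        // reserved
        System.out.println("Header length: 0x" + Integer.toHexString(header.length)); // 0x20
    }
}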
@@ -16,6 +16,7 @@
package ghidra.app.util.bin.format.macho.commands;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

@@ -27,6 +28,7 @@ import ghidra.program.model.address.Address;
import ghidra.program.model.data.*;
import ghidra.program.model.listing.ProgramModule;
import ghidra.program.model.symbol.SourceType;
import ghidra.util.DataConverter;
import ghidra.util.exception.DuplicateNameException;
import ghidra.util.task.TaskMonitor;

@@ -302,4 +304,78 @@ public class SegmentCommand extends LoadCommand {
    public String toString() {
        return getSegmentName();
    }

    /**
     * Creates a new segment command byte array
     *
     * @param magic The magic
     * @param name The name of the segment (must be less than or equal to 16 bytes)
     * @param vmAddr The address of the start of the segment
     * @param vmSize The size of the segment in memory
     * @param fileOffset The file offset of the start of the segment
     * @param fileSize The size of the segment on disk
     * @param maxProt The maximum protections of the segment
     * @param initProt The initial protection of the segment
     * @param numSections The number of sections in the segment
     * @param flags The segment flags
     * @return The new segment in byte array form
     * @throws MachException if an invalid magic value was passed in (see {@link MachConstants}), or
     *   if the desired segment name exceeds 16 bytes
     */
    public static byte[] create(int magic, String name, long vmAddr, long vmSize, long fileOffset,
            long fileSize, int maxProt, int initProt, int numSections, int flags)
            throws MachException {

        if (name.length() > 16) {
            throw new MachException("Segment name cannot exceed 16 bytes: " + name);
        }

        DataConverter conv = DataConverter.getInstance(magic == MachConstants.MH_MAGIC);
        boolean is64bit = magic == MachConstants.MH_CIGAM_64 || magic == MachConstants.MH_MAGIC_64;

        // Segment Command
        byte[] bytes = new byte[size(magic)];
        conv.putInt(bytes, 0x00,
            is64bit ? LoadCommandTypes.LC_SEGMENT_64 : LoadCommandTypes.LC_SEGMENT);
        conv.putInt(bytes, 0x04, bytes.length);
        byte[] nameBytes = name.getBytes(StandardCharsets.US_ASCII);
        System.arraycopy(nameBytes, 0, bytes, 0x8, nameBytes.length);
        if (is64bit) {
            conv.putLong(bytes, 0x18, vmAddr);
            conv.putLong(bytes, 0x20, vmSize);
            conv.putLong(bytes, 0x28, fileOffset);
            conv.putLong(bytes, 0x30, fileSize);
            conv.putInt(bytes, 0x38, maxProt);
            conv.putInt(bytes, 0x3c, initProt);
            conv.putInt(bytes, 0x40, numSections);
            conv.putInt(bytes, 0x44, flags);
        }
        else {
            conv.putInt(bytes, 0x18, (int) vmAddr);
            conv.putInt(bytes, 0x1c, (int) vmSize);
            conv.putInt(bytes, 0x20, (int) fileOffset);
            conv.putInt(bytes, 0x24, (int) fileSize);
            conv.putInt(bytes, 0x28, maxProt);
            conv.putInt(bytes, 0x2c, initProt);
            conv.putInt(bytes, 0x30, numSections);
            conv.putInt(bytes, 0x34, flags);
        }

        return bytes;
    }

    /**
     * Gets the size a segment command would be for the given magic
     *
     * @param magic The magic
     * @return The size in bytes a segment command would be for the given magic
     * @throws MachException if an invalid magic value was passed in (see {@link MachConstants})
     */
    public static int size(int magic) throws MachException {
        if (!MachConstants.isMagic(magic)) {
            throw new MachException("Invalid magic: 0x%x".formatted(magic));
        }
        boolean is64bit = magic == MachConstants.MH_CIGAM_64 || magic == MachConstants.MH_MAGIC_64;
        return is64bit ? 0x48 : 0x38;
    }
}
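A similarly hedged sketch of the new SegmentCommand helpers (hypothetical demo class and values; the protections are plain vm_prot bit masks, 0x5 = read + execute):

import ghidra.app.util.bin.format.macho.MachConstants;
import ghidra.app.util.bin.format.macho.MachException;
import ghidra.app.util.bin.format.macho.commands.SegmentCommand;

public class SegmentCommandCreateDemo {
    public static void main(String[] args) throws MachException {
        int magic = MachConstants.MH_MAGIC_64;

        // 0x48 bytes for a 64-bit LC_SEGMENT_64, 0x38 bytes for a 32-bit LC_SEGMENT
        int cmdSize = SegmentCommand.size(magic);

        byte[] cmd = SegmentCommand.create(magic, "__TEXT",
            0x100000000L,  // vmaddr
            0x4000,        // vmsize
            0,             // fileoff
            0x4000,        // filesize
            0x5, 0x5,      // maxprot / initprot (r-x)
            0,             // number of sections
            0);            // flags
        System.out.println(cmd.length == cmdSize); // true
    }
}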
@@ -118,6 +118,14 @@ public class DyldCacheMappingAndSlideInfo implements StructConverter {
        return flags;
    }

    public int getMaxProtection() {
        return maxProt;
    }

    public int getInitialProtection() {
        return initProt;
    }

    public boolean isAuthData() {
        return (flags & DYLD_CACHE_MAPPING_AUTH_DATA) != 0;
    }
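The two protection values exposed by the new accessors are Mach vm_prot_t bit masks; a stand-alone sketch of decoding them (the constants below are the standard VM_PROT_* bits, not taken from this diff):

public class ProtDecodeDemo {
    private static final int VM_PROT_READ = 0x1;
    private static final int VM_PROT_WRITE = 0x2;
    private static final int VM_PROT_EXECUTE = 0x4;

    public static void main(String[] args) {
        int maxProt = VM_PROT_READ | VM_PROT_EXECUTE; // e.g. a value returned by getMaxProtection()
        System.out.println("readable:   " + ((maxProt & VM_PROT_READ) != 0));
        System.out.println("writable:   " + ((maxProt & VM_PROT_WRITE) != 0));
        System.out.println("executable: " + ((maxProt & VM_PROT_EXECUTE) != 0));
    }
}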
@@ -19,6 +19,9 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;

import ghidra.app.util.bin.*;
import ghidra.app.util.bin.format.macho.*;
import ghidra.app.util.bin.format.macho.commands.*;
@@ -37,7 +40,23 @@ import ghidra.util.task.TaskMonitor;
public class DyldCacheExtractor {

    /**
     * Gets an {@link ByteProvider} that reads a DYLIB from a {@link DyldCacheFileSystem}. The
     * A footer that gets appended to the end of every extracted component so Ghidra can identify
     * them and treat them specially when imported
     */
    public static final byte[] FOOTER_V1 =
        "Ghidra DYLD extraction v1".getBytes(StandardCharsets.US_ASCII);

    /**
     * A {@link DyldCacheMappingAndSlideInfo} with a possibly reduced set of available addresses
     * within the mapping
     *
     * @param mappingInfo A {@link DyldCacheMappingAndSlideInfo}
     * @param rangeSet A possibly reduced set of available addresses within the mapping
     */
    public record MappingRange(DyldCacheMappingAndSlideInfo mappingInfo, RangeSet<Long> rangeSet) {}

    /**
     * Gets a {@link ByteProvider} that contains a DYLIB from a {@link DyldCacheFileSystem}. The
     * DYLIB's header will be altered to account for its segment bytes being packed down.
     *
     * @param dylibOffset The offset of the DYLIB in the given provider
@@ -62,19 +81,97 @@
    }

    /**
     * Converts the given value to a byte array
     * Gets a {@link ByteProvider} that contains a byte mapping from a {@link DyldCacheFileSystem}
     *
     * @param value The value to convert to a byte array
     * @param size The number of bytes to convert (must be 4 or 8)
     * @return The value as a byte array of the given size
     * @throws IllegalArgumentException if size is an unsupported value
     * @param mappingRange The {@link MappingRange}
     * @param segmentName The name of the segment in the resulting Mach-O
     * @param splitDyldCache The {@link SplitDyldCache}
     * @param index The mapping's {@link SplitDyldCache} index
     * @param slideFixupMap A {@link Map} of {@link DyldCacheSlideFixup}s to perform
     * @param fsrl {@link FSRL} to assign to the resulting {@link ByteProvider}
     * @param monitor {@link TaskMonitor}
     * @return {@link ByteProvider} containing the bytes of the mapping
     * @throws MachException If there was an error creating Mach-O headers
     * @throws IOException If there was an IO-related issue with extracting the mapping
     * @throws CancelledException If the user cancelled the operation
     */
    private static byte[] toBytes(long value, int size) throws IllegalArgumentException {
        if (size != 4 && size != 8) {
            throw new IllegalArgumentException("Size must be 4 or 8 (got " + size + ")");
    public static ByteProvider extractMapping(MappingRange mappingRange, String segmentName,
            SplitDyldCache splitDyldCache, int index,
            Map<DyldCacheSlideInfoCommon, List<DyldCacheSlideFixup>> slideFixupMap, FSRL fsrl,
            TaskMonitor monitor) throws IOException, MachException, CancelledException {

        int magic = MachConstants.MH_MAGIC_64;
        List<Range<Long>> ranges = new ArrayList<>(mappingRange.rangeSet().asRanges());
        DyldCacheMappingAndSlideInfo mappingInfo = mappingRange.mappingInfo();
        int allSegmentsSize = SegmentCommand.size(magic) * ranges.size();

        // Fix slide pointers
        ByteProvider origProvider = splitDyldCache.getProvider(index);
        byte[] fixedProviderBytes = origProvider.readBytes(0, origProvider.length());
        DyldCacheSlideInfoCommon slideInfo = slideFixupMap.keySet()
            .stream()
            .filter(e -> e.getMappingAddress() == mappingInfo.getAddress())
            .findFirst()
            .orElse(null);
        if (slideInfo != null) {
            List<DyldCacheSlideFixup> slideFixups = slideFixupMap.get(slideInfo);
            monitor.initialize(slideFixups.size(), "Fixing slide pointers...");
            for (DyldCacheSlideFixup fixup : slideFixups) {
                monitor.increment();
                long fileOffset = slideInfo.getMappingFileOffset() + fixup.offset();
                byte[] newBytes = toBytes(fixup.value(), fixup.size());
                System.arraycopy(newBytes, 0, fixedProviderBytes, (int) fileOffset,
                    newBytes.length);
            }
        }
        DataConverter converter = LittleEndianDataConverter.INSTANCE;
        return size == 8 ? converter.getBytes(value) : converter.getBytes((int) value);

        // Mach-O Header
        byte[] header = MachHeader.create(magic, 0x100000c, 0x80000002, 6, ranges.size(),
            allSegmentsSize, 0x42100085, 0);

        // Segment commands and data
        List<byte[]> segments = new ArrayList<>();
        List<byte[]> data = new ArrayList<>();
        int current = header.length + allSegmentsSize;
        try (ByteProvider fixedProvider = new ByteArrayProvider(fixedProviderBytes)) {
            for (int i = 0; i < ranges.size(); i++) {
                Range<Long> range = ranges.get(i);

                // Segment Command
                long dataSize = range.upperEndpoint() - range.lowerEndpoint();
                segments.add(
                    SegmentCommand.create(magic, "%s.%d.%d".formatted(segmentName, index, i),
                        range.lowerEndpoint(), dataSize, current, dataSize,
                        mappingInfo.getMaxProtection(), mappingInfo.getMaxProtection(), 0, 0));

                // Data
                data.add(fixedProvider.readBytes(
                    range.lowerEndpoint() - mappingInfo.getAddress() + mappingInfo.getFileOffset(),
                    dataSize));

                current += dataSize;
            }
        }

        // Combine pieces
        int dataSize = data.stream().mapToInt(d -> d.length).sum();
        int totalSize = header.length + allSegmentsSize + dataSize;
        byte[] result = new byte[totalSize + FOOTER_V1.length];
        System.arraycopy(header, 0, result, 0, header.length);
        current = header.length;
        for (byte[] segment : segments) {
            System.arraycopy(segment, 0, result, current, segment.length);
            current += segment.length;
        }
        for (byte[] d : data) {
            System.arraycopy(d, 0, result, current, d.length);
            current += d.length;
        }

        // Add footer
        System.arraycopy(FOOTER_V1, 0, result, result.length - FOOTER_V1.length, FOOTER_V1.length);

        return new ByteArrayProvider(result, fsrl);
    }

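To make the output layout of extractMapping() easier to see, here is a self-contained sketch that assembles a single-range mapping the same way: Mach-O header, one segment command, the raw bytes, then FOOTER_V1. Sizes, addresses, protections, and the class name are hypothetical; the header constants are the ones used above:

import java.nio.charset.StandardCharsets;

import ghidra.app.util.bin.format.macho.MachConstants;
import ghidra.app.util.bin.format.macho.MachHeader;
import ghidra.app.util.bin.format.macho.commands.SegmentCommand;

public class ExtractMappingSketch {
    public static void main(String[] args) throws Exception {
        int magic = MachConstants.MH_MAGIC_64;
        byte[] mappingBytes = new byte[0x1000]; // stand-in for the (slide-fixed) cache bytes
        byte[] footer = "Ghidra DYLD extraction v1".getBytes(StandardCharsets.US_ASCII);

        // One load command, so sizeofcmds is a single segment command
        byte[] header = MachHeader.create(magic, 0x100000c, 0x80000002, 6, 1,
            SegmentCommand.size(magic), 0x42100085, 0);

        // The segment's file offset points just past the header and load commands
        byte[] segment = SegmentCommand.create(magic, "__DATA.0.0", 0x180000000L,
            mappingBytes.length, header.length + SegmentCommand.size(magic),
            mappingBytes.length, 0x1, 0x1, 0, 0);

        // header | segment command | data | footer
        byte[] result = new byte[header.length + segment.length + mappingBytes.length + footer.length];
        int pos = 0;
        System.arraycopy(header, 0, result, pos, header.length);
        pos += header.length;
        System.arraycopy(segment, 0, result, pos, segment.length);
        pos += segment.length;
        System.arraycopy(mappingBytes, 0, result, pos, mappingBytes.length);
        pos += mappingBytes.length;
        System.arraycopy(footer, 0, result, pos, footer.length);
    }
}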
    /**
@@ -113,6 +210,22 @@ public class DyldCacheExtractor {
        return slideFixupMap;
    }

    /**
     * Converts the given value to a byte array
     *
     * @param value The value to convert to a byte array
     * @param size The number of bytes to convert (must be 4 or 8)
     * @return The value as a byte array of the given size
     * @throws IllegalArgumentException if size is an unsupported value
     */
    private static byte[] toBytes(long value, int size) throws IllegalArgumentException {
        if (size != 4 && size != 8) {
            throw new IllegalArgumentException("Size must be 4 or 8 (got " + size + ")");
        }
        DataConverter converter = LittleEndianDataConverter.INSTANCE;
        return size == 8 ? converter.getBytes(value) : converter.getBytes((int) value);
    }

    /**
     * A packed DYLIB that was once living inside of a DYLD shared cache. The DYLIB is said to be
     * packed because its segment file bytes, which were not adjacent in its containing DYLD, are
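The relocated toBytes() helper writes slide-fixed pointer values back in little-endian order; a tiny stand-alone illustration of that byte order using the same converter class (demo class name is mine):

import ghidra.util.LittleEndianDataConverter;

public class LittleEndianDemo {
    public static void main(String[] args) {
        // 0x11223344 is encoded low byte first, which is what toBytes(value, 4) produces
        byte[] encoded = LittleEndianDataConverter.INSTANCE.getBytes(0x11223344);
        for (byte b : encoded) {
            System.out.printf("%02x ", b); // prints: 44 33 22 11
        }
        System.out.println();
    }
}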
@@ -198,6 +311,9 @@ public class DyldCacheExtractor {
                }
            }

            // Account for the size of the footer
            packedSize += FOOTER_V1.length;

            packed = new byte[packedSize];

            // Copy each segment into the packed array (leaving no gaps)
@@ -217,8 +333,10 @@ public class DyldCacheExtractor {
                    // that might get extracted and added to the same program. Rather than
                    // computing the optimal address it should go at (which will require looking
                    // at every other DYLIB in the cache which is slow), just make the address very
                    // far away from the other DYLIBs
                    segment.setVMaddress(textSegment.getVMaddress() << 4);
                    // far away from the other DYLIBs. This should be safe for 64-bit binaries.
                    if (!machoHeader.is32bit()) {
                        segment.setVMaddress(textSegment.getVMaddress() << 4);
                    }
                }
                else {
                    bytes = segmentProvider.readBytes(segment.getFileOffset(), segmentSize);
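A quick arithmetic check of the vmaddr adjustment guarded above (values and class name hypothetical): shifting a typical shared-cache __TEXT address left by 4 multiplies it by 16, which still fits comfortably in a 64-bit address space but would not fit in a 32-bit one, hence the new is32bit() guard:

public class VmaddrShiftDemo {
    public static void main(String[] args) {
        long textVmaddr = 0x180000000L;   // a typical 64-bit shared-cache __TEXT address
        long relocated = textVmaddr << 4; // 0x1800000000, far away from the other DYLIBs
        System.out.printf("0x%x -> 0x%x%n", textVmaddr, relocated);
    }
}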
@@ -230,6 +348,10 @@ public class DyldCacheExtractor {
            fixupMachHeader();
            fixupLoadCommands();
            fixupSlidePointers();

            // Add footer
            System.arraycopy(FOOTER_V1, 0, packed, packed.length - FOOTER_V1.length,
                FOOTER_V1.length);
        }

        /**
@@ -684,3 +806,4 @@ public class DyldCacheExtractor {
            }
        }
    }

@@ -18,13 +18,20 @@ package ghidra.file.formats.ios.dyldcache;
import java.io.IOException;
import java.util.*;

import org.apache.commons.io.FilenameUtils;

import com.google.common.collect.*;

import ghidra.app.util.bin.BinaryReader;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.bin.format.macho.MachException;
import ghidra.app.util.bin.format.macho.MachHeader;
import ghidra.app.util.bin.format.macho.commands.SegmentCommand;
import ghidra.app.util.bin.format.macho.dyld.*;
import ghidra.app.util.importer.MessageLog;
import ghidra.app.util.opinion.DyldCacheUtils;
import ghidra.app.util.opinion.DyldCacheUtils.SplitDyldCache;
import ghidra.file.formats.ios.dyldcache.DyldCacheExtractor.MappingRange;
import ghidra.formats.gfilesystem.*;
import ghidra.formats.gfilesystem.annotations.FileSystemInfo;
import ghidra.formats.gfilesystem.factory.GFileSystemBaseFactory;
@@ -32,6 +39,9 @@ import ghidra.util.exception.CancelledException;
import ghidra.util.exception.CryptoException;
import ghidra.util.task.TaskMonitor;

/**
 * A {@link GFileSystem} implementation for the components of a DYLD Cache
 */
@FileSystemInfo(type = "dyldcachev1", description = "iOS DYLD Cache Version 1", factory = GFileSystemBaseFactory.class)
public class DyldCacheFileSystem extends GFileSystemBase {

@@ -40,7 +50,15 @@ public class DyldCacheFileSystem extends GFileSystemBase {
    private Map<DyldCacheSlideInfoCommon, List<DyldCacheSlideFixup>> slideFixupMap;
    private Map<GFile, Long> addrMap = new HashMap<>();
    private Map<GFile, Integer> indexMap = new HashMap<>();
    private Map<Long, MappingRange> stubMap = new HashMap<>();
    private Map<Long, MappingRange> dyldDataMap = new HashMap<>();

    /**
     * Creates a new {@link DyldCacheFileSystem}
     *
     * @param fileSystemName The name of the file system
     * @param provider The {@link ByteProvider} that contains the file system
     */
    public DyldCacheFileSystem(String fileSystemName, ByteProvider provider) {
        super(fileSystemName, provider);
    }
@@ -51,6 +69,8 @@ public class DyldCacheFileSystem extends GFileSystemBase {
        parsedLocalSymbols = false;
        addrMap.clear();
        indexMap.clear();
        stubMap.clear();
        dyldDataMap.clear();
        if (splitDyldCache != null) {
            splitDyldCache.close();
            splitDyldCache = null;
@@ -82,6 +102,16 @@ public class DyldCacheFileSystem extends GFileSystemBase {
        }

        try {
            if (stubMap.containsKey(addr)) {
                MappingRange mappingRange = stubMap.get(addr);
                return DyldCacheExtractor.extractMapping(mappingRange, "_STUBS", splitDyldCache,
                    index, slideFixupMap, file.getFSRL(), monitor);
            }
            if (dyldDataMap.containsKey(addr)) {
                MappingRange mappingRange = dyldDataMap.get(addr);
                return DyldCacheExtractor.extractMapping(mappingRange, "__DATA", splitDyldCache,
                    index, slideFixupMap, file.getFSRL(), monitor);
            }
            return DyldCacheExtractor.extractDylib(machHeaderStartIndexInProvider,
                splitDyldCache, index, slideFixupMap, file.getFSRL(), monitor);
        }
@@ -130,26 +160,120 @@ public class DyldCacheFileSystem extends GFileSystemBase {

    @Override
    public void open(TaskMonitor monitor) throws IOException, CryptoException, CancelledException {
        MessageLog log = new MessageLog();
        monitor.setMessage("Opening DYLD cache...");

        splitDyldCache = new SplitDyldCache(provider, false, log, monitor);
        splitDyldCache = new SplitDyldCache(provider, false, new MessageLog(), monitor);
        Map<Integer, List<MappingRange>> dyldDataMappingRanges = new HashMap<>();

        // Find and store all the mappings for the DYLD data subcaches.
        // As we find other components that overlap, these mapping ranges will be reduced so there
        // is no overlap
        monitor.initialize(splitDyldCache.size(), "Find DYLD data...");
        for (int i = 0; i < splitDyldCache.size(); i++) {
            monitor.increment();
            String name = splitDyldCache.getName(i);
            DyldCacheHeader header = splitDyldCache.getDyldCacheHeader(i);
            monitor.setMessage("Find files...");
            List<DyldCacheMappingAndSlideInfo> mappingInfos = header.getCacheMappingAndSlideInfos();
            List<MappingRange> mappingRangeList = new ArrayList<>();
            if (name.endsWith(".dylddata")) {
                dyldDataMappingRanges.put(i, mappingRangeList);
                for (int j = 0; j < mappingInfos.size(); j++) {
                    DyldCacheMappingAndSlideInfo mappingInfo = mappingInfos.get(j);
                    mappingRangeList.add(new MappingRange(mappingInfo, getRangeSet(mappingInfo)));
                }
            }
        }

        // Find DYLIB and STUBS components. Remove DYLIB segment ranges from the DYLD data mappings
        // so we can later add the DYLD data with no overlap.
        // NOTE: The STUBS will never overlap with DYLD data so there is no need to remove STUB
        // segment ranges.
        monitor.initialize(splitDyldCache.size(), "Find DYLD components...");
        for (int i = 0; i < splitDyldCache.size(); i++) {
            monitor.increment();
            String name = splitDyldCache.getName(i);
            DyldCacheHeader header = splitDyldCache.getDyldCacheHeader(i);
            List<DyldCacheMappingAndSlideInfo> mappingInfos = header.getCacheMappingAndSlideInfos();

            // DYLIBs
            List<DyldCacheImage> mappedImages = header.getMappedImages();
            monitor.initialize(mappedImages.size());
            for (DyldCacheImage mappedImage : mappedImages) {
                GFileImpl file =
                    GFileImpl.fromPathString(this, root, mappedImage.getPath(), null, false, -1);
                storeFile(file, mappedImage.getAddress(), i);
                reduceOverlappingSegments(i, mappedImage, dyldDataMappingRanges.values());
            }

            // STUBS
            for (DyldCacheMappingAndSlideInfo mappingInfo : mappingInfos) {
                if (mappingInfo.isTextStubs()) {
                    GFileImpl file =
                        GFileImpl.fromPathString(this, root, getStubPath(name), null, false, -1);
                    storeFile(file, mappingInfo.getAddress(), i);
                    stubMap.put(mappingInfo.getAddress(),
                        new MappingRange(mappingInfo, getRangeSet(mappingInfo)));
                    break; // assuming just 1 stub block
                }
            }
        }

        // Add DYLD data components with reduced mapping ranges
        for (Integer i : dyldDataMappingRanges.keySet()) {
            String name = splitDyldCache.getName(i);
            List<MappingRange> mappingRangeList = dyldDataMappingRanges.get(i);
            for (int j = 0; j < mappingRangeList.size(); j++) {
                monitor.checkCancelled();
                monitor.incrementProgress(1);
                MappingRange mappingRange = mappingRangeList.get(j);
                DyldCacheMappingAndSlideInfo mappingInfo = mappingRange.mappingInfo();
                GFileImpl file =
                    GFileImpl.fromPathString(this, root, getDyldDataPath(name, j), null, false, -1);
                storeFile(file, mappingInfo.getAddress(), i);
                dyldDataMap.put(mappingInfo.getAddress(), mappingRange);
            }
        }
    }

    private void storeFile(GFile file, Long addr, Integer index) {
    /**
     * Gets the open {@link SplitDyldCache}
     *
     * @return The opened {@link SplitDyldCache}, or null if it is not open
     */
    public SplitDyldCache getSplitDyldCache() {
        return splitDyldCache;
    }

    /**
     * Gets the text stub path for the given DYLD Cache name
     *
     * @param dyldCacheName The name of the DYLD Cache
     * @return The text stub path for the given DYLD Cache name
     */
    public static String getStubPath(String dyldCacheName) {
        return "/STUBS/STUBS." + FilenameUtils.getExtension(dyldCacheName);
    }

    /**
     * Gets the DYLD data path for the given DYLD Cache name
     *
     * @param dyldCacheName The name of the DYLD Cache
     * @param mappingIndex The mapping index
     * @return The DYLD data path for the given DYLD Cache name
     */
    public static String getDyldDataPath(String dyldCacheName, int mappingIndex) {
        return "/DYLD_DATA/DYLD_DATA.%s.%d".formatted(
            FilenameUtils.getExtension(
                dyldCacheName.substring(0, dyldCacheName.length() - ".dylddata".length())),
            mappingIndex);
    }

    /**
     * "Stores" the given {@link GFile file} and its parent hierarchy in lookup maps for future
     * access
     *
     * @param file The {@link GFile file} to store
     * @param address The address that corresponds to the {@link GFile file}
     * @param splitDyldCacheIndex The {@link SplitDyldCache} index that corresponds to the given
     *   {@link GFile file}
     */
    private void storeFile(GFile file, Long address, Integer splitDyldCacheIndex) {
        if (file == null) {
            return;
        }
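For a concrete feel of the paths these helpers synthesize, a small sketch using made-up subcache names of the usual dyld_shared_cache_arm64e.NN form (demo class name is mine):

import ghidra.file.formats.ios.dyldcache.DyldCacheFileSystem;

public class DyldCachePathDemo {
    public static void main(String[] args) {
        // -> /STUBS/STUBS.01
        System.out.println(DyldCacheFileSystem.getStubPath("dyld_shared_cache_arm64e.01"));

        // -> /DYLD_DATA/DYLD_DATA.03.0 (the ".dylddata" suffix is stripped before taking the extension)
        System.out.println(
            DyldCacheFileSystem.getDyldDataPath("dyld_shared_cache_arm64e.03.dylddata", 0));
    }
}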
@@ -157,10 +281,53 @@ public class DyldCacheFileSystem extends GFileSystemBase {
            return;
        }
        if (!addrMap.containsKey(file) || addrMap.get(file) == null) {
            addrMap.put(file, addr);
            indexMap.put(file, index);
            addrMap.put(file, address);
            indexMap.put(file, splitDyldCacheIndex);
        }
        GFile parentFile = file.getParentFile();
        storeFile(parentFile, null, null);
    }

    /**
     * Gets the default range of the given {@link DyldCacheMappingAndSlideInfo}
     *
     * @param mappingInfo The {@link DyldCacheMappingAndSlideInfo} to get the range of
     * @return The default range of the given {@link DyldCacheMappingAndSlideInfo}
     */
    private RangeSet<Long> getRangeSet(DyldCacheMappingAndSlideInfo mappingInfo) {
        RangeSet<Long> rangeSet = TreeRangeSet.create();
        rangeSet.add(Range.openClosed(mappingInfo.getAddress(),
            mappingInfo.getAddress() + mappingInfo.getSize()));
        return rangeSet;
    }

    /**
     * Reduces the given ranges so they do not overlap with the segments found in the given image
     *
     * @param splitDyldCacheIndex The {@link SplitDyldCache}
     * @param mappedImage The {@link DyldCacheImage} that may overlap the given ranges
     * @param ranges The {@link MappingRange ranges} to reduce
     * @throws IOException if an exception occurred while parsing the image's Mach-O header
     */
    private void reduceOverlappingSegments(int splitDyldCacheIndex, DyldCacheImage mappedImage,
            Collection<List<MappingRange>> ranges) throws IOException {
        DyldCacheHeader dyldCacheHeader = splitDyldCache.getDyldCacheHeader(splitDyldCacheIndex);
        ByteProvider p = splitDyldCache.getProvider(splitDyldCacheIndex);
        try {
            MachHeader machoHeader =
                new MachHeader(p, mappedImage.getAddress() - dyldCacheHeader.getBaseAddress());
            for (SegmentCommand segment : machoHeader.parseSegments()) {
                for (List<MappingRange> mappingRanges : ranges) {
                    for (MappingRange mappingRange : mappingRanges) {
                        Range<Long> range = Range.closedOpen(segment.getVMaddress(),
                            segment.getVMaddress() + segment.getVMsize());
                        mappingRange.rangeSet().remove(range);
                    }
                }
            }
        }
        catch (MachException e) {
            throw new IOException(e);
        }
    }
}

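The carve-out logic above is plain Guava RangeSet arithmetic; a minimal stand-alone sketch of the same pattern with hypothetical addresses:

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class RangeReduceDemo {
    public static void main(String[] args) {
        // A dylddata mapping covering (0x1000, 0x5000], built like getRangeSet()
        RangeSet<Long> available = TreeRangeSet.create();
        available.add(Range.openClosed(0x1000L, 0x5000L));

        // A DYLIB segment spanning [0x2000, 0x3000) is removed, as in reduceOverlappingSegments()
        available.remove(Range.closedOpen(0x2000L, 0x3000L));

        System.out.println(available.asRanges().size()); // 2 disjoint ranges remain
    }
}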