Merge branch 'GP-1864_d-millar_dumpfile_loader_REBASED_REVIEWED'

ghidra1 2022-06-10 10:10:57 -04:00
commit 672c1f11e2
83 changed files with 13552 additions and 50 deletions


@ -64,8 +64,8 @@ public class DebugDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
monitor.setMessage(program.getName()+": debug...");
Address addr = PeUtils.getMarkupAddress(program, isBinary, ntHeader, virtualAddress);


@ -86,8 +86,8 @@ public class DelayImportDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
monitor.setMessage(program.getName()+": delay import(s)...");
Address addr = PeUtils.getMarkupAddress(program, isBinary, ntHeader, virtualAddress);


@ -49,8 +49,9 @@ public class ExceptionDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
monitor.setMessage(program.getName()+": exceptions...");
Address addr = PeUtils.getMarkupAddress(program, isBinary, ntHeader, virtualAddress);
if (!program.getMemory().contains(addr)) {


@ -145,8 +145,9 @@ public class ExportDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
monitor.setMessage("[" + program.getName() + "]: exports...");
Address addr = PeUtils.getMarkupAddress(program, isBinary, ntHeader, virtualAddress);


@ -41,8 +41,8 @@ public class GlobalPointerDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
monitor.setMessage(program.getName()+": global pointers...");
Address addr = PeUtils.getMarkupAddress(program, isBinary, ntHeader, virtualAddress);
if (!program.getMemory().contains(addr)) {


@ -76,8 +76,9 @@ public class ImportDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException, MemoryAccessException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException,
+MemoryAccessException {
if (imports == null || descriptors == null) {
return;


@ -50,8 +50,9 @@ public class LoadConfigDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
monitor.setMessage(program.getName()+": load config directory...");
Address addr = PeUtils.getMarkupAddress(program, isBinary, ntHeader, virtualAddress);


@ -269,6 +269,8 @@ public class OptionalHeaderImpl implements OptionalHeader {
@Override
public void processDataDirectories(TaskMonitor monitor) throws IOException {
+reader.setPointerIndex(startOfDataDirs);
dataDirectory = new DataDirectory[numberOfRvaAndSizes];
if (numberOfRvaAndSizes == 0) {
return;
@ -513,7 +515,7 @@ public class OptionalHeaderImpl implements OptionalHeader {
if (is64bit()) {
baseOfData = -1;//not used
imageBase = reader.readNextLong();
-if (imageBase <= 0) {
+if (imageBase <= 0 && !is64bit()) {
Msg.warn(this, "Non-standard image base: 0x" + Long.toHexString(imageBase));
originalImageBase = imageBase;
imageBase = 0x10000;


@ -177,8 +177,8 @@ public class ResourceDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
if (rootDirectory == null) {
return;


@ -63,8 +63,8 @@ public class SecurityDataDirectory extends DataDirectory implements ByteArrayCon
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
if (!isBinary) {//certificates are never mapped into running program...
return;


@ -58,8 +58,8 @@ public class TLSDataDirectory extends DataDirectory {
@Override
public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
-NTHeader ntHeader) throws DuplicateNameException, CodeUnitInsertionException,
-IOException {
+NTHeader ntHeader)
+throws DuplicateNameException, CodeUnitInsertionException, IOException {
monitor.setMessage(program.getName()+": TLS...");
Address addr = PeUtils.getMarkupAddress(program, isBinary, ntHeader, virtualAddress);


@ -76,7 +76,7 @@ public class PeLoader extends AbstractPeDebugLoader {
return loadSpecs;
}
-PortableExecutable pe = new PortableExecutable(provider, SectionLayout.FILE, false, false);
+PortableExecutable pe = new PortableExecutable(provider, getSectionLayout(), false, false);
NTHeader ntHeader = pe.getNTHeader();
if (ntHeader != null && ntHeader.getOptionalHeader() != null) {
long imageBase = ntHeader.getOptionalHeader().getImageBase();
@ -102,7 +102,7 @@ public class PeLoader extends AbstractPeDebugLoader {
return;
}
-PortableExecutable pe = new PortableExecutable(provider, SectionLayout.FILE, false,
+PortableExecutable pe = new PortableExecutable(provider, getSectionLayout(), false,
shouldParseCliHeaders(options));
NTHeader ntHeader = pe.getNTHeader();
@ -113,7 +113,7 @@ public class PeLoader extends AbstractPeDebugLoader {
FileHeader fileHeader = ntHeader.getFileHeader();
monitor.setMessage("Completing PE header parsing...");
-FileBytes fileBytes = MemoryBlockUtils.createFileBytes(program, provider, monitor);
+FileBytes fileBytes = createFileBytes(provider, program, monitor);
try {
Map<SectionHeader, Address> sectionToAddress =
processMemoryBlocks(pe, program, fileBytes, monitor, log);
@ -166,6 +166,16 @@ public class PeLoader extends AbstractPeDebugLoader {
monitor.setMessage("[" + program.getName() + "]: done!");
}
+protected SectionLayout getSectionLayout() {
+return SectionLayout.FILE;
+}
+protected FileBytes createFileBytes(ByteProvider provider, Program program, TaskMonitor monitor)
+throws IOException, CancelledException {
+FileBytes fileBytes = MemoryBlockUtils.createFileBytes(program, provider, monitor);
+return fileBytes;
+}
@Override
public List<Option> getDefaultOptions(ByteProvider provider, LoadSpec loadSpec,
DomainObject domainObject, boolean loadIntoProgram) {
@ -211,7 +221,8 @@ public class PeLoader extends AbstractPeDebugLoader {
return PARSE_CLI_HEADERS_OPTION_DEFAULT;
}
-private void layoutHeaders(Program program, PortableExecutable pe, NTHeader ntHeader,
+private void layoutHeaders(Program program, PortableExecutable pe,
+NTHeader ntHeader,
DataDirectory[] datadirs) {
try {
DataType dt = pe.getDOSHeader().toDataType();
@ -419,7 +430,6 @@ public class PeLoader extends AbstractPeDebugLoader {
AddressSpace space = af.getDefaultAddressSpace();
Listing listing = program.getListing();
-ReferenceManager refManager = program.getReferenceManager();
ImportInfo[] imports = idd.getImports();
for (ImportInfo importInfo : imports) {
@ -441,27 +451,30 @@ public class PeLoader extends AbstractPeDebugLoader {
setComment(CodeUnit.PRE_COMMENT, address, importInfo.getComment());
Data data = listing.getDefinedDataAt(address);
-if (data == null || !(data.getValue() instanceof Address)) {
-continue;
+if (data != null && data.isPointer()) {
+addExternalReference(data, importInfo, log);
+}
}
}
-Address extAddr = (Address) data.getValue();
-if (extAddr != null) {
-// remove the existing mem reference that was created
-// when making a pointer
-data.removeOperandReference(0, extAddr);
+protected void addExternalReference(Data pointerData, ImportInfo importInfo, MessageLog log) {
+Address extAddr = (Address) pointerData.getValue();
+if (extAddr != null) {
+// remove the existing mem reference that was created when making a pointer
+pointerData.removeOperandReference(0, extAddr);
// symTable.removeSymbol(symTable.getDynamicSymbol(extAddr));
-try {
-refManager.addExternalReference(address, importInfo.getDLL().toUpperCase(),
-importInfo.getName(), extAddr, SourceType.IMPORTED, 0, RefType.DATA);
-}
-catch (DuplicateNameException e) {
-log.appendMsg("External location not created: " + e.getMessage());
-}
-catch (InvalidInputException e) {
-log.appendMsg("External location not created: " + e.getMessage());
-}
+try {
+ReferenceManager refManager = pointerData.getProgram().getReferenceManager();
+refManager.addExternalReference(pointerData.getAddress(),
+importInfo.getDLL().toUpperCase(),
+importInfo.getName(), extAddr, SourceType.IMPORTED, 0, RefType.DATA);
+}
+catch (DuplicateNameException e) {
+log.appendMsg("External location not created: " + e.getMessage());
+}
+catch (InvalidInputException e) {
+log.appendMsg("External location not created: " + e.getMessage());
+}
}
}
@ -544,11 +557,11 @@ public class PeLoader extends AbstractPeDebugLoader {
}
/**
-* Mark this location as code in the CodeMap.
-* The analyzers will pick this up and disassemble the code.
+* Mark this location as code in the CodeMap. The analyzers will pick this up and disassemble
+* the code.
*
-* TODO: this should be in a common place, so all importers can communicate that something
-* is code or data.
+* TODO: this should be in a common place, so all importers can communicate that something is
+* code or data.
*
* @param program The program to mark up.
* @param address The location.
@ -687,7 +700,7 @@ public class PeLoader extends AbstractPeDebugLoader {
}
}
-private Map<SectionHeader, Address> processMemoryBlocks(PortableExecutable pe, Program prog,
+protected Map<SectionHeader, Address> processMemoryBlocks(PortableExecutable pe, Program prog,
FileBytes fileBytes, TaskMonitor monitor, MessageLog log)
throws AddressOverflowException {
@ -803,7 +816,7 @@ public class PeLoader extends AbstractPeDebugLoader {
return sectionToAddress;
}
-private int getVirtualSize(PortableExecutable pe, SectionHeader[] sections,
+protected int getVirtualSize(PortableExecutable pe, SectionHeader[] sections,
AddressSpace space) {
DOSHeader dosHeader = pe.getDOSHeader();
OptionalHeader optionalHeader = pe.getNTHeader().getOptionalHeader();
@ -1019,6 +1032,7 @@ public class PeLoader extends AbstractPeDebugLoader {
/**
* Return true if chararray appears in full, starting at offset bytestart in bytearray
+*
* @param bytearray the array of bytes containing the potential match
* @param bytestart the potential start of the match
* @param chararray the array of characters to match


@ -26,6 +26,7 @@ dependencies {
api project(':Base')
api project(':Recognizers')
+api project(':PDB')
api ':dex-ir:2.0'
api ':dex-reader:2.0'


@ -13,5 +13,6 @@ data/ExtensionPoint.manifest||GHIDRA||||END|
data/android/eclipse-classpath||GHIDRA||reviewed||END|
data/android/eclipse-project||GHIDRA||reviewed||END|
data/crypto/README.txt||GHIDRA||||END|
+data/languages/dumpfile.opinion||GHIDRA||||END|
src/main/help/help/TOC_Source.xml||GHIDRA||||END|
src/main/help/help/topics/FileFormatsPlugin/FileFormats.html||GHIDRA||||END|


@ -0,0 +1,48 @@
<opinions>
<constraint loader="Dump File Loader">
<constraint compilerSpecID="windows">
<!-- Page/Userdump MachineImageType values -->
<!-- constraint primary="21064" processor="ALPHA" endian="little" size="64" -->
<constraint primary="1824" processor="ARM" endian="little" size="32" variant="v8" />
<constraint primary="2080" processor="ARM" endian="little" size="32" variant="v8T" /> <!-- THUMB -->
<constraint primary="2336" processor="ARM" endian="little" size="32" variant="v8T" /> <!-- THUMB -->
<constraint primary="2577" processor="ARM" endian="little" size="32" variant="v8T" /> <!-- THUMB -->
<constraint primary="70001" processor="ARM" endian="little" size="32" variant="v8T" /> <!-- THUMB -->
<!-- constraint primary="2200" processor="IA64" endian="little" size="64" -->
<constraint primary="4000" processor="MIPS" endian="little" size="32" variant="default" /> <!-- R4000 -->
<!-- constraint primary="821" processor="MOTOROLA" endian="little" size="64" -->
<!-- constraint primary="18767" processor="OPTIL" endian="little" size="64" -->
<constraint primary="103" processor="SuperH3" endian="little" size="32" />
<constraint primary="104" processor="SuperH4" endian="little" size="32" />
<constraint primary="10003" processor="SuperH4" endian="little" size="32" />
<constraint primary="10004" processor="SuperH4" endian="little" size="32" />
<constraint primary="10005" processor="SuperH4" endian="little" size="32" />
<constraint primary="386" processor="x86" endian="little" size="32" />
<constraint primary="486" processor="x86" endian="little" size="32" />
<constraint primary="586" processor="x86" endian="little" size="32" />
<constraint primary="8664" processor="x86" endian="little" size="64" />
<!-- MDMP Architecture values -->
<constraint primary="0" processor="x86" endian="little" size="32" />
<constraint primary="1" processor="MIPS" endian="little" size="32" />
<!-- constraint primary="2" processor="ALPHA" endian="little" size="32" -->
<constraint primary="4" processor="SuperH4" endian="little" size="32" />
<constraint primary="5" processor="ARM" endian="little" size="32" />
<!-- constraint primary="6" processor="IA64" endian="little" size="32" -->
<!-- constraint primary="7" processor="ALPHA64" endian="little" size="32" -->
<!-- constraint primary="8" processor="MSIL" endian="little" size="32" -->
<constraint primary="9" processor="x86" endian="little" size="64" />
<!-- constraint primary="10" processor="IA32/64" endian="little" size="32" -->
<!-- constraint primary="11" processor="NEUTRAL" endian="little" size="32" -->
<constraint primary="12" processor="ARM" endian="little" size="64" />
<constraint primary="13" processor="ARM" endian="little" size="32" />
<!-- constraint primary="14" processor="IA32" endian="little" size="32" -->
</constraint>
<constraint compilerSpecID="default">
<constraint primary="601" processor="PowerPC" endian="little" size="32" />
<constraint primary="603" processor="PowerPC" endian="little" size="32" />
<constraint primary="604" processor="PowerPC" endian="little" size="32" />
<constraint primary="620" processor="PowerPC" endian="little" size="32" />
<constraint primary="3" processor="PowerPC" endian="little" size="32" />
</constraint>
</constraint>
</opinions>


@ -0,0 +1,35 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//Given a raw binary PE image,
//this script will create data structures
//representing the PE headers. These include,
//but are not limited to, the PE header,
//section headers, optional header, etc.
//@category Binary
import ghidra.app.script.GhidraScript;
import ghidra.file.formats.dump.cmd.ModuleToPeHelper;
public class ApplyPEToDumpFileScript extends GhidraScript {
@Override
public void run() throws Exception {
if (currentProgram != null) {
ModuleToPeHelper.queryModules(currentProgram, monitor);
}
}
}


@ -0,0 +1,137 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump;
import ghidra.program.model.address.Address;
public class DumpAddressObject {
private String providerId;
private long rva;
private long base;
private long length;
private boolean isRead = true;
private boolean isWrite = true;
private boolean isExec = true;
private String comment;
private Address address;
private String rangeName;
public DumpAddressObject(String providerId, long rva, long base, long length) {
this.providerId = providerId;
this.rva = rva;
this.base = base;
this.length = length;
}
public String getProviderId() {
return providerId;
}
public void setProviderId(String providerId) {
this.providerId = providerId;
}
public long getRVA() {
return rva;
}
public void setRVA(long rva) {
this.rva = rva;
}
public long getBase() {
return base;
}
public void setBase(long base) {
this.base = base;
}
public void setLength(long length) {
this.length = length;
}
public long getLength() {
return length;
}
public long getAdjustedAddress(long addr) {
return addr - getBase() + getRVA();
}
public long getCopyLen(long addr, long size) {
if (addr - getRVA() + size > getLength()) {
return getLength() - (addr - getRVA());
}
return size;
}
public boolean isRead() {
return isRead;
}
public boolean isWrite() {
return isWrite;
}
public boolean isExec() {
return isExec;
}
public void setRead(boolean isRead) {
this.isRead = isRead;
}
/**
* @param isWrite the isWrite to set
*/
public void setWrite(boolean isWrite) {
this.isWrite = isWrite;
}
/**
* @param isExec the isExec to set
*/
public void setExec(boolean isExec) {
this.isExec = isExec;
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment;
}
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
public String getRangeName() {
return rangeName;
}
public void setRangeName(String name) {
this.rangeName = name;
}
}


@ -0,0 +1,102 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump;
import ghidra.program.model.data.DataType;
public class DumpData {
private DataType dt;
private long offset;
private String name;
private boolean generateSymbol;
private boolean generateFragment;
private long size;
public DumpData(long offset, DataType dt) {
this(offset, dt, dt.getDisplayName(), false, true);
}
public DumpData(long offset, DataType dt, String name) {
this(offset, dt, name, true, true);
}
public DumpData(long offset, DataType dt, String name, boolean genSymbol, boolean genFragment) {
this.offset = offset;
this.dt = dt;
this.name = name;
this.generateSymbol = genSymbol;
this.generateFragment = genFragment;
this.size = dt.getLength();
}
public DumpData(long offset, String name, int size) {
this.offset = offset;
this.dt = null;
this.name = name;
this.generateSymbol = true;
this.generateFragment = true;
this.size = size;
}
public DataType getDataType() {
return dt;
}
public void setDataType(DataType dt) {
this.dt = dt;
}
public long getOffset() {
return offset;
}
public void setOffset(long offset) {
this.offset = offset;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public boolean isGenerateSymbol() {
return generateSymbol;
}
public void setGenerateSymbol(boolean genSymbol) {
this.generateSymbol = genSymbol;
}
public boolean isGenerateFragment() {
return generateFragment;
}
public void setGenerateFragment(boolean genFragment) {
this.generateFragment = genFragment;
}
public void setSize(int size) {
this.size = size;
}
public long getSize() {
return size;
}
}


@ -0,0 +1,264 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump;
import java.io.IOException;
import java.math.BigInteger;
import java.util.*;
import java.util.Map.Entry;
import ghidra.app.plugin.core.analysis.AutoAnalysisManager;
import ghidra.app.plugin.core.datamgr.util.DataTypeArchiveUtility;
import ghidra.app.services.DataTypeManagerService;
import ghidra.app.util.Option;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressFactory;
import ghidra.program.model.data.*;
import ghidra.program.model.lang.Language;
import ghidra.program.model.lang.Register;
import ghidra.program.model.listing.*;
import ghidra.util.Msg;
import ghidra.util.task.TaskMonitor;
public class DumpFile {
protected List<DumpData> data = new ArrayList<DumpData>();
// Interior ranges are actual defined memory ranges.
// Exterior ranges are aggregates of interior ranges, typically corresponding to a module
protected Map<Address, DumpAddressObject> intAddressRanges = new HashMap<>();
protected Map<Address, DumpAddressObject> extAddressRanges = new HashMap<>();
protected ProgramBasedDataTypeManager dtm;
protected Program program;
protected DumpFileReader reader;
protected Language lang;
private Address minAddr;
protected List<Option> options;
protected List<DumpModule> modules = new ArrayList<DumpModule>();
protected long contextOffset;
// The following are not currently used, but might be at some point
// ProcessId/threadId match the current process & thread.
// Processes and threads, obviously, include other entries at the time
// of the crash
protected int processId = 0;
protected int threadId = 0;
protected List<String> processes = new ArrayList<String>();
protected List<String> threads = new ArrayList<String>();
protected Map<String, DataTypeManager> managerList = new HashMap<>();
public DumpFile(DumpFileReader reader, ProgramBasedDataTypeManager dtm, List<Option> options,
TaskMonitor monitor) {
this.reader = reader;
this.dtm = dtm;
this.program = dtm.getProgram();
this.lang = program.getLanguage();
AddressFactory factory = lang.getAddressFactory();
this.minAddr = factory.getAddressSet().getMinAddress();
this.options = options;
}
protected DataType getTypeFromArchive(String name) {
return getTypeFromArchive(null, name);
}
public DataType getTypeFromArchive(CategoryPath path, String name) {
DataType datatype = null;
for (DataTypeManager dtmx : managerList.values()) {
if (path == null) {
datatype = dtmx.getDataType(name);
}
else {
datatype = dtmx.getDataType(path, name);
}
if (datatype != null) {
break;
}
}
return datatype == null ? null : datatype.clone(null);
}
protected void initManagerList(List<String> addins) {
AutoAnalysisManager mgr = AutoAnalysisManager.getAnalysisManager(program);
DataTypeManagerService service = mgr.getDataTypeManagerService();
List<String> archiveList = DataTypeArchiveUtility.getArchiveList(program);
for (String archiveName : archiveList) {
addToManagerList(service, archiveName);
}
if (addins != null) {
for (String archiveName : addins) {
addToManagerList(service, archiveName);
}
}
managerList.put("this", dtm);
}
protected void addToManagerList(DataTypeManagerService service, String key) {
DataTypeManager val = null;
try {
val = service.openDataTypeArchive(key);
if (val != null) {
managerList.put(key, val);
}
}
catch (Exception e) {
Msg.error(this, e.getMessage());
}
}
protected DataType addDumpData(int offset, String name, CategoryPath path) {
DataType dt = path == null ? getTypeFromArchive(name) : getTypeFromArchive(path, name);
if (dt != null) {
data.add(new DumpData(offset, dt));
return dt;
}
data.add(new DumpData(offset, name, 0));
return null;
}
public List<DumpData> getData() {
return data;
}
public Map<Address, DumpAddressObject> getInteriorAddressRanges() {
return intAddressRanges;
}
public Map<Address, DumpAddressObject> getExteriorAddressRanges() {
return extAddressRanges;
}
protected DumpAddressObject getInteriorAddressObject(long base) {
Address address = minAddr.getNewAddress(base);
return intAddressRanges.get(address);
}
public void addInteriorAddressObject(String id, long rva, long base, long len) {
DumpAddressObject dao = new DumpAddressObject(id, rva, base, len);
Address address = minAddr.getNewAddress(base);
dao.setAddress(address);
intAddressRanges.put(address, dao);
}
public void addExteriorAddressObject(String id, int rva, long base, long len) {
Address address = minAddr.getNewAddress(base);
extAddressRanges.put(address, new DumpAddressObject(id, rva, base, len));
}
public Address getAddress(long addr) {
return minAddr.getNewAddress(addr);
}
public long getContextOffset() {
return contextOffset;
}
public String getProcessId() {
if (processId < 0 || processId > 0xFFFFF)
processId = 0;
return Integer.toHexString(processId);
}
public String getThreadId() {
if (threadId < 0 || threadId > 0xFFFFF)
threadId = 0;
return Integer.toHexString(threadId);
}
protected void addProcess(long pid, String name, int index) {
processes.add(Long.toHexString(pid) + ":" + name + ":" + index);
}
public List<String> getProcesses() {
return processes;
}
protected void addThread(long pid, long tid, int index) {
threads.add(Long.toHexString(tid) + ":" + Long.toHexString(pid) + ":" + index);
}
public List<String> getThreads() {
return threads;
}
protected void addModule(String name, long imageBase, int index, long size) {
if (name.indexOf('\\') >= 0) {
name = name.substring(name.lastIndexOf('\\') + 1);
}
if (name.indexOf('.') >= 0) {
name = name.substring(0, name.indexOf('.'));
}
modules.add(new DumpModule(name, index, imageBase, size));
//modules.add(name + ":" + Long.toHexString(imageBase & 0xFFFFFFFFL) + ":" +
// Integer.toHexString(index) + ":" + Long.toHexString((imageBase & 0xFFFFFFFFL) + size));
}
public List<DumpModule> getModules() {
return modules;
}
protected void setProgramContext(long offset, DataType dt, String tid) {
ProgramContext ctx = program.getProgramContext();
if (dt instanceof TypedefDataType) {
TypedefDataType typedef = (TypedefDataType) dt;
dt = typedef.getBaseDataType();
}
if (dt instanceof StructureDataType) {
Map<String, Long> map = new HashMap<>();
StructureDataType struct = (StructureDataType) dt;
DataTypeComponent[] components = struct.getComponents();
for (DataTypeComponent dtc : components) {
String fieldName = dtc.getFieldName();
int fieldOffset = dtc.getOffset();
try {
long fieldValue = reader.readPointer(offset + fieldOffset);
map.put(fieldName.toUpperCase(), fieldValue);
}
catch (IOException e) {
Msg.error(this, e.getMessage());
}
}
Register pc = program.getLanguage().getProgramCounter();
if (map.containsKey(pc.getName())) {
Long pcval = map.get(pc.getName());
Msg.info(this,
"Setting context for thread " + tid + " at " + Long.toHexString(pcval));
Address start = getAddress(pcval);
for (Entry<String, Long> entry : map.entrySet()) {
Register register = ctx.getRegister(entry.getKey());
if (register != null) {
try {
ctx.setValue(register, start, start,
BigInteger.valueOf(entry.getValue()));
}
catch (ContextChangeException e) {
Msg.error(this, e.getMessage());
}
}
}
}
}
}
public void analyze(TaskMonitor monitor) {
// Override if needed
}
}


@ -0,0 +1,319 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
import ghidra.app.util.*;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.importer.MessageLog;
import ghidra.app.util.opinion.*;
import ghidra.file.formats.dump.mdmp.Minidump;
import ghidra.file.formats.dump.pagedump.Pagedump;
import ghidra.file.formats.dump.userdump.Userdump;
import ghidra.framework.model.DomainObject;
import ghidra.framework.store.LockException;
import ghidra.program.database.mem.FileBytes;
import ghidra.program.database.mem.MemoryMapDB;
import ghidra.program.model.address.*;
import ghidra.program.model.data.DataUtilities;
import ghidra.program.model.data.ProgramBasedDataTypeManager;
import ghidra.program.model.lang.Language;
import ghidra.program.model.listing.Program;
import ghidra.program.model.mem.MemoryBlock;
import ghidra.program.model.mem.MemoryBlockException;
import ghidra.program.model.symbol.SourceType;
import ghidra.program.model.symbol.SymbolTable;
import ghidra.program.model.util.CodeUnitInsertionException;
import ghidra.util.Msg;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;
/**
* A {@link Loader} for processing dump files and their embedded objects.
*/
public class DumpFileLoader extends AbstractLibrarySupportLoader {
/** The name of the dump file loader */
public final static String DF_NAME = "Dump File Loader";
public static final String CREATE_MEMORY_BLOCKS_OPTION_NAME = "Create Memory Blocks";
public static final String DEBUG_DATA_PATH_OPTION_NAME =
"Debug Data Path (e.g. /path/to/ntoskrnl.pdb)";
public static final String JOIN_BLOCKS_OPTION_NAME = "Join Blocks";
public static final String ANALYZE_EMBEDDED_OBJECTS_OPTION_NAME =
"Analyze Embedded Executables (interactive)";
public static final boolean CREATE_MEMORY_BLOCKS_OPTION_DEFAULT = true;
public static final String DEBUG_DATA_PATH_OPTION_DEFAULT = "";
public static final boolean JOIN_BLOCKS_OPTION_DEFAULT = false;
public static final boolean ANALYZE_EMBEDDED_OBJECTS_OPTION_DEFAULT = false; // must be off by default
public static final String MEMORY = "Memory";
private Map<AddressRange, String> ranges = new HashMap<>();
private MessageLog log;
private boolean joinBlocks;
@Override
public String getName() {
return DF_NAME;
}
@Override
public Collection<LoadSpec> findSupportedLoadSpecs(ByteProvider provider) throws IOException {
List<LoadSpec> loadSpecs = new ArrayList<>();
String machineType = getMachineType(provider);
if (machineType != null) {
List<QueryResult> results = QueryOpinionService.query(getName(), machineType, null);
for (QueryResult result : results) {
loadSpecs.add(new LoadSpec(this, 0, result));
}
if (loadSpecs.isEmpty()) {
loadSpecs.add(new LoadSpec(this, 0, true));
}
}
return loadSpecs;
}
private String getMachineType(ByteProvider provider) {
DumpFileReader reader = new DumpFileReader(provider, true, 64);
int signature;
try {
signature = reader.readInt(0);
switch (signature) {
case Pagedump.SIGNATURE:
return Pagedump.getMachineType(reader);
case Userdump.SIGNATURE:
return Userdump.getMachineType(reader);
case Minidump.SIGNATURE:
return Minidump.getMachineType(reader);
}
}
catch (IOException e) {
//Ignore
}
return null;
}
@Override
@SuppressWarnings("hiding")
protected void load(ByteProvider provider, LoadSpec loadSpec, List<Option> options,
Program program, TaskMonitor monitor, MessageLog log)
throws CancelledException, IOException {
this.log = log;
parseDumpFile(provider, program, options, monitor);
}
private void parseDumpFile(ByteProvider provider, Program program, List<Option> options,
TaskMonitor monitor) throws IOException, CancelledException {
Language language = program.getLanguage();
int size = language.getDefaultSpace().getSize();
DumpFileReader reader = new DumpFileReader(provider, true, size);
joinBlocks = OptionUtils.getBooleanOptionValue(JOIN_BLOCKS_OPTION_NAME, options,
DumpFileLoader.JOIN_BLOCKS_OPTION_DEFAULT);
ProgramBasedDataTypeManager dtm = program.getDataTypeManager();
DumpFile df = null;
int signature = reader.readInt(0);
switch (signature) {
case Pagedump.SIGNATURE:
df = new Pagedump(reader, dtm, options, monitor);
break;
case Userdump.SIGNATURE:
df = new Userdump(reader, dtm, options, monitor);
break;
case Minidump.SIGNATURE:
df = new Minidump(reader, dtm, options, monitor);
break;
}
if (df != null) {
groupRanges(program, provider, df.getExteriorAddressRanges(), monitor);
loadRanges(program, provider, df.getInteriorAddressRanges(), monitor);
applyStructures(program, df, monitor);
df.analyze(monitor);
}
}
public void loadRanges(Program program, ByteProvider provider,
Map<Address, DumpAddressObject> daos, TaskMonitor monitor) {
try {
monitor.setMessage("Creating file bytes");
FileBytes fileBytes = MemoryBlockUtils.createFileBytes(program, provider, monitor);
int count = 0;
monitor.setMessage("Tagging blocks");
monitor.initialize(daos.size());
for (Address address : daos.keySet()) {
DumpAddressObject d = daos.get(address);
String name = d.getProviderId();
for (AddressRange range : ranges.keySet()) {
if (range.contains(address)) {
name = ranges.get(range);
break;
}
}
d.setRangeName(name);
monitor.setProgress(count++);
monitor.checkCanceled();
}
count = 0;
monitor.setMessage("Processing blocks");
monitor.initialize(daos.size());
for (Address address : daos.keySet()) {
DumpAddressObject d = daos.get(address);
try {
MemoryBlockUtils.createInitializedBlock(program, false, d.getRangeName(),
address, fileBytes,
d.getRVA(), // offset into filebytes
d.getLength(), // size
d.getComment(), // comment
null, // source
d.isRead(), // section.isReadonly(),
d.isWrite(), // section.isWriteable(),
d.isExec(), //section.isExecutable());
log);
monitor.setProgress(count++);
monitor.checkCanceled();
}
catch (AddressOutOfBoundsException | AddressOverflowException
| IllegalArgumentException e) {
Msg.warn(this, e.getMessage());
}
}
if (joinBlocks) {
Set<Address> deleted = new HashSet<>();
count = 0;
monitor.setMessage("Joining blocks");
monitor.initialize(daos.size());
MemoryMapDB memory = (MemoryMapDB) program.getMemory();
for (Address address : daos.keySet()) {
if (deleted.contains(address)) {
continue;
}
MemoryBlock m = memory.getBlock(address);
MemoryBlock next;
while ((next = memory.getBlock(address.addWrap(m.getSize()))) != null) {
if (!next.getStart().equals(m.getStart().addWrap(m.getSize()))) {
break;
}
try {
m = memory.join(m, next);
}
catch (MemoryBlockException | LockException | NotFoundException e) {
break;
}
deleted.add(next.getStart());
monitor.setProgress(count++);
monitor.checkCanceled();
}
monitor.setProgress(count++);
monitor.checkCanceled();
//memory.invalidateCache(true);
}
}
}
catch (CancelledException | IOException e1) {
Msg.error(this, e1.getMessage());
}
}
public void groupRanges(Program program, ByteProvider provider,
Map<Address, DumpAddressObject> daos, TaskMonitor monitor) throws CancelledException {
monitor.setMessage("Assigning ranges");
monitor.initialize(daos.size());
int count = 0;
for (Entry<Address, DumpAddressObject> entry : daos.entrySet()) {
DumpAddressObject d = entry.getValue();
Address address = entry.getKey();
if (d.getBase() == 0) {
continue;
}
try {
AddressRangeImpl range = new AddressRangeImpl(address, d.getLength());
ranges.put(range, d.getProviderId());
}
catch (AddressOverflowException | AddressOutOfBoundsException
| IllegalArgumentException e) {
Msg.warn(this, e.getMessage());
}
monitor.setProgress(count++);
monitor.checkCanceled();
}
}
private void applyStructures(Program program, DumpFile df, TaskMonitor monitor)
throws CancelledException {
SymbolTable symbolTable = program.getSymbolTable();
monitor.setMessage("Applying data structures");
List<DumpData> data = df.getData();
monitor.initialize(data.size());
int count = 0;
for (DumpData dd : data) {
Address address = program.getImageBase().addWrap(dd.getOffset());
try {
if (dd.getDataType() == null) {
try {
symbolTable.createLabel(address, dd.getName(), SourceType.IMPORTED);
}
catch (InvalidInputException e) {
Msg.error(this,
"Error creating label " + dd.getName() + " at address " + address +
": " + e.getMessage());
}
continue;
}
DataUtilities.createData(program, address, dd.getDataType(), -1, false,
DataUtilities.ClearDataMode.CHECK_FOR_SPACE);
}
catch (CodeUnitInsertionException e) {
Msg.error(this, "Could not create " + dd.getDataType().getName() + " at " + address);
}
monitor.setProgress(count++);
monitor.checkCanceled();
}
}
@Override
public List<Option> getDefaultOptions(ByteProvider provider, LoadSpec loadSpec,
DomainObject domainObject, boolean isLoadIntoProgram) {
List<Option> list = new ArrayList<>();
list.add(new Option(CREATE_MEMORY_BLOCKS_OPTION_NAME, CREATE_MEMORY_BLOCKS_OPTION_DEFAULT,
Boolean.class, Loader.COMMAND_LINE_ARG_PREFIX + "-createMemoryBlocks"));
list.add(new Option(DEBUG_DATA_PATH_OPTION_NAME, DEBUG_DATA_PATH_OPTION_DEFAULT,
String.class, Loader.COMMAND_LINE_ARG_PREFIX + "-debugDataFilePath"));
list.add(new Option(JOIN_BLOCKS_OPTION_NAME, JOIN_BLOCKS_OPTION_DEFAULT, Boolean.class,
Loader.COMMAND_LINE_ARG_PREFIX + "-joinBlocks"));
list.add(new Option(ANALYZE_EMBEDDED_OBJECTS_OPTION_NAME,
ANALYZE_EMBEDDED_OBJECTS_OPTION_DEFAULT));
return list;
}
@Override
public String validateOptions(ByteProvider provider, LoadSpec loadSpec, List<Option> options,
Program program) {
return super.validateOptions(provider, loadSpec, options, program);
}
}


@ -0,0 +1,48 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump;
import java.io.IOException;
import ghidra.app.util.bin.BinaryReader;
import ghidra.app.util.bin.ByteProvider;
public class DumpFileReader extends BinaryReader {
private int size;
public DumpFileReader(ByteProvider provider, boolean isLittleEndian, int size) {
super(provider, isLittleEndian);
this.size = size;
}
public long readNextPointer() throws IOException {
return size == 32 ? readNextInt() : readNextLong();
}
public long readPointer(long offset) throws IOException {
return size == 32 ? readInt(offset) : readLong(offset);
}
public int getPointerSize() {
return size / 8;
}
public void setPointerSize(int size) {
this.size = size;
}
}


@ -0,0 +1,64 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump;
public class DumpModule {
private String name;
private int index;
private long base;
private long size;
public DumpModule(String name, int index, long base, long size) {
this.setName(name);
this.setIndex(index);
this.setBase(base);
this.setSize(size);
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getIndex() {
return index;
}
public void setIndex(int index) {
this.index = index;
}
public long getBase() {
return base;
}
public void setBase(long base) {
this.base = base;
}
public long getSize() {
return size;
}
public void setSize(long size) {
this.size = size;
}
}


@ -0,0 +1,241 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.cmd;
import java.io.IOException;
import java.util.*;
import ghidra.app.util.Option;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.bin.format.pe.*;
import ghidra.app.util.bin.format.pe.PortableExecutable.SectionLayout;
import ghidra.app.util.importer.MessageLog;
import ghidra.app.util.opinion.LoadSpec;
import ghidra.app.util.opinion.PeLoader;
import ghidra.program.database.ProgramDB;
import ghidra.program.database.mem.FileBytes;
import ghidra.program.database.mem.MemoryMapDB;
import ghidra.program.model.address.*;
import ghidra.program.model.listing.*;
import ghidra.util.Msg;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;
public class DumpPeShim extends PeLoader {
private ProgramDB program;
private ProgramFragment fragment;
private ProgramModule rootModule;
private ProgramModule module;
public DumpPeShim(ProgramDB program) {
this.program = program;
}
protected void load(ByteProvider provider, LoadSpec loadSpec, List<Option> options,
Program pgm, ProgramFragment frag, TaskMonitor monitor, MessageLog log)
throws IOException, CancelledException {
Collection<LoadSpec> loadSpecs = findSupportedLoadSpecs(provider);
if (loadSpecs.isEmpty()) {
Msg.error(this, "Not a valid PE image: " + frag.getName());
return;
}
generateModule(pgm, frag);
Address minAddress = module.getMinAddress();
if (minAddress.getOffset() == 0) {
Msg.warn(this, "Zero-based fragment - skipping");
return;
}
program.setEffectiveImageBase(minAddress);
try {
load(provider, loadSpec, options, program, monitor, log);
monitor.checkCanceled();
}
finally {
program.setEffectiveImageBase(null);
}
shiftModule();
}
private void generateModule(Program pgm, ProgramFragment frag) {
this.rootModule = pgm.getListing().getRootModule(0);
this.fragment = frag;
String name = fragment.getName();
try {
fragment.setName(name + "_pad");
module = rootModule.createModule(name);
module.reparent(name + "_pad", rootModule);
}
catch (DuplicateNameException e) {
Msg.error(this, "Unable to convert " + name);
}
catch (NotFoundException e) {
Msg.error(this, "Unable to reparent " + name);
}
}
private void shiftModule() {
try {
module.moveChild(module.getName() + "_pad", module.getNumChildren() - 1);
}
catch (NotFoundException e) {
Msg.error(this, "Unable to reparent " + module.getName());
}
}
protected SectionLayout getSectionLayout() {
return SectionLayout.MEMORY;
}
protected FileBytes createFileBytes(ByteProvider provider, Program pgm, TaskMonitor monitor)
throws IOException, CancelledException {
List<FileBytes> fileBytesList = pgm.getMemory().getAllFileBytes();
return fileBytesList.get(0);
}
private void adjustBlock(Address address, long size, String name) {
String fragmentName = module.getName() + "_" + name;
try {
MemoryMapDB memory = program.getMemory();
if (memory.contains(address)) {
ProgramFragment frag = module.createFragment(fragmentName);
frag.move(address, address.add(size - 1));
}
}
catch (NotFoundException e) {
Msg.warn(this, "Fragment not in memory " + fragmentName);
}
catch (NullPointerException e) {
Msg.error(this, "Unable to reparent " + fragmentName);
}
catch (DuplicateNameException e) {
//Msg.warn(this, "Duplicate name exception: " + fragmentName);
}
}
protected Map<SectionHeader, Address> processMemoryBlocks(PortableExecutable pe, Program prog,
FileBytes fileBytes, TaskMonitor monitor, MessageLog log)
throws AddressOverflowException {
AddressFactory af = prog.getAddressFactory();
AddressSpace space = af.getDefaultAddressSpace();
Map<SectionHeader, Address> sectionToAddress = new HashMap<>();
if (monitor.isCancelled()) {
return sectionToAddress;
}
monitor.setMessage("[" + prog.getName() + "]: processing memory blocks...");
NTHeader ntHeader = pe.getNTHeader();
FileHeader fileHeader = ntHeader.getFileHeader();
OptionalHeader optionalHeader = ntHeader.getOptionalHeader();
SectionHeader[] sections = fileHeader.getSectionHeaders();
if (sections.length == 0) {
Msg.warn(this, "No sections found");
}
// Header block
int virtualSize = (int) Math.min(getVirtualSize(pe, sections, space), fileBytes.getSize());
long addr = optionalHeader.getImageBase();
Address address = space.getAddress(addr);
adjustBlock(address, virtualSize, HEADERS);
// Section blocks
try {
for (int i = 0; i < sections.length; ++i) {
if (monitor.isCancelled()) {
return sectionToAddress;
}
addr = sections[i].getVirtualAddress() + optionalHeader.getImageBase();
address = space.getAddress(addr);
int rawDataSize = sections[i].getSizeOfRawData();
int rawDataPtr = sections[i].getPointerToRawData();
virtualSize = sections[i].getVirtualSize();
if (rawDataSize != 0 && rawDataPtr != 0) {
int dataSize =
((rawDataSize > virtualSize && virtualSize > 0) || rawDataSize < 0)
? virtualSize
: rawDataSize;
if (ntHeader.checkRVA(dataSize) ||
(0 < dataSize && dataSize < pe.getFileLength())) {
if (!ntHeader.checkRVA(dataSize)) {
Msg.warn(this, "OptionalHeader.SizeOfImage < size of " +
sections[i].getName() + " section");
}
String sectionName = sections[i].getReadableName();
if (sectionName.isBlank()) {
sectionName = "SECTION." + i;
}
sectionToAddress.put(sections[i], address);
adjustBlock(address, virtualSize, sectionName);
}
if (rawDataSize == virtualSize) {
continue;
}
else if (rawDataSize > virtualSize) {
// virtual size fully initialized
continue;
}
// remainder of virtual size is uninitialized
if (rawDataSize < 0) {
Msg.error(this,
"Section[" + i + "] has invalid size " +
Integer.toHexString(rawDataSize) + " (" +
Integer.toHexString(virtualSize) + ")");
break;
}
virtualSize -= rawDataSize;
address = address.add(rawDataSize);
}
if (virtualSize == 0) {
Msg.error(this, "Section[" + i + "] has size zero");
}
else {
int dataSize = (virtualSize > 0 || rawDataSize < 0) ? virtualSize : 0;
if (dataSize > 0) {
sectionToAddress.put(sections[i], address);
adjustBlock(address, virtualSize, sections[i].getReadableName());
}
}
}
}
catch (IllegalStateException ise) {
if (optionalHeader.getFileAlignment() != optionalHeader.getSectionAlignment()) {
throw new IllegalStateException(ise);
}
Msg.warn(this, "Section header processing aborted");
}
return sectionToAddress;
}
@Override
protected void addExternalReference(Data pointerData, ImportInfo importInfo, MessageLog log) {
// Ignore
}
}


@ -0,0 +1,72 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.cmd;
import java.util.*;
import docking.widgets.ListSelectionTableDialog;
import ghidra.app.util.bin.ByteProvider;
import ghidra.app.util.importer.MessageLog;
import ghidra.program.database.ProgramDB;
import ghidra.program.model.listing.*;
import ghidra.program.model.mem.Memory;
import ghidra.util.Msg;
import ghidra.util.task.TaskMonitor;
public class ModuleToPeHelper {
public static void queryModules(Program program, TaskMonitor taskMon) {
Memory memory = program.getMemory();
ProgramModule root = program.getListing().getDefaultRootModule();
Group[] children = root.getChildren();
List<String> names = new ArrayList<>();
Map<String, Group> map = new HashMap<>();
for (Group child : children) {
names.add(child.getName());
map.put(child.getName(), child);
}
ListSelectionTableDialog<String> dialog =
new ListSelectionTableDialog<String>("Modules To Apply", names);
List<String> selected = dialog.showSelectMultiple(null);
for (String key : selected) {
Group value = map.get(key);
if (value instanceof ProgramFragment) {
ProgramFragment mod = (ProgramFragment) value;
if (mod.isEmpty()) {
continue;
}
try {
taskMon.setMessage("Analyzing " + mod.getName());
taskMon.checkCanceled();
ByteProvider provider = new ProgramInsertByteProvider(memory, mod);
DumpPeShim loader = new DumpPeShim((ProgramDB) program);
loader.load(provider, null, null, program, mod,
taskMon, new MessageLog());
}
catch (Exception e) {
// Ignore
Msg.error(null, e.getMessage());
taskMon.clearCanceled();
}
}
}
}
}


@ -0,0 +1,110 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.cmd;
import java.io.File;
import java.io.IOException;
import ghidra.app.util.bin.ByteProvider;
import ghidra.program.model.listing.ProgramFragment;
import ghidra.program.model.mem.*;
/**
* <code>ProgramInsertByteProvider</code> provides a {@link ByteProvider} backed by the memory of a {@link ProgramFragment}.
*/
public class ProgramInsertByteProvider implements ByteProvider {
private MemoryBufferImpl buffer;
private ProgramFragment mod;
private long len;
public ProgramInsertByteProvider(Memory memory, ProgramFragment mod) {
this.mod = mod;
len = mod.getMaxAddress().subtract(mod.getMinAddress()) + 1;
buffer = new MemoryBufferImpl(memory, mod.getMinAddress(), (int) len);
}
@Override
public File getFile() {
return null;
}
@Override
public String getName() {
return mod.getName();
}
@Override
public String getAbsolutePath() {
return null;
}
/**
* Return the length of the fragment's address range
*
* @return the fragment length in bytes
*/
@Override
public long length() {
return len;
}
@Override
public boolean isValidIndex(long index) {
if (index < 0 || index >= len) {
return false;
}
try {
buffer.getByte((int) index);
return true;
}
catch (MemoryAccessException e) {
return false;
}
}
@Override
public void close() throws IOException {
// not applicable
}
@Override
public byte readByte(long index) throws IOException {
if (index < 0 || index > Integer.MAX_VALUE) {
throw new IOException("index out of range");
}
try {
return buffer.getByte((int) index);
}
catch (MemoryAccessException e) {
throw new RuntimeException("index out of range");
}
}
@Override
public byte[] readBytes(long index, long length) throws IOException {
if (index < 0 || (index + length - 1) > Integer.MAX_VALUE) {
throw new IOException("index/length of range");
}
int ilen = (int) length;
byte[] bytes = new byte[ilen];
if (buffer.getBytes(bytes, (int) index) != ilen) {
throw new RuntimeException("index/length of range");
}
return bytes;
}
}


@ -0,0 +1,71 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.DataType;
import ghidra.program.model.data.StructureDataType;
import ghidra.util.exception.DuplicateNameException;
public class CommentStreamA implements StructConverter {
public final static String NAME = "MINIDUMP_COMMENT_A";
private int commentLength;
private String comment;
private DumpFileReader reader;
private long index;
CommentStreamA(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
commentLength = reader.readNextInt();
comment = reader.readNextAsciiString();
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "CommentLength", null);
struct.add(ASCII, commentLength, "Comment", null);
return struct;
}
public String getCommentA() {
return comment;
}
public void setCommentA(String comment) {
this.comment = comment;
}
}


@ -0,0 +1,71 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.DataType;
import ghidra.program.model.data.StructureDataType;
import ghidra.util.exception.DuplicateNameException;
public class CommentStreamW implements StructConverter {
public final static String NAME = "MINIDUMP_COMMENT_W";
private int commentLength;
private String comment;
private DumpFileReader reader;
private long index;
CommentStreamW(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
commentLength = reader.readNextInt();
comment = reader.readNextUnicodeString();
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "CommentLength", null);
struct.add(UTF16, commentLength, "Comment", null);
return struct;
}
public String getCommentW() {
return comment;
}
public void setCommentW(String comment) {
this.comment = comment;
}
}


@ -0,0 +1,119 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class CvRecord implements StructConverter {
public final static String NAME = "MINIDUMP_CV_RECORD";
private int pdbFormat;
private byte[] pdbSigGUID = new byte[16];
private int pdbAge;
private byte[] pdbName;
private DumpFileReader reader;
private long index;
private int nameLength;
CvRecord(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setPdbFormat(reader.readNextInt());
for (int i = 0; i < pdbSigGUID.length; i++) {
setPdbSigGUID(reader.readNextByte(), i);
}
setPdbAge(reader.readNextInt());
nameLength = getNameLength(reader, reader.getPointerIndex());
pdbName = new byte[nameLength];
for (int i = 0; i < nameLength; i++) {
setPdbName(reader.readNextByte(), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "PdbFormat", null);
ArrayDataType adt = new ArrayDataType(BYTE, 16, 1);
struct.add(adt, 16, "PdbSigGUID", null);
struct.add(DWORD, 4, "PdbAge", null);
//struct.add(STRING,nameLength,"PdbName",null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getPdbFormat() {
return pdbFormat;
}
public void setPdbFormat(int pdbFormat) {
this.pdbFormat = pdbFormat;
}
public byte[] getPdbSigGUID() {
return pdbSigGUID;
}
public void setPdbSigGUID(byte b, int index) {
this.pdbSigGUID[index] = b;
}
public int getPdbAge() {
return pdbAge;
}
public void setPdbAge(int pdbAge) {
this.pdbAge = pdbAge;
}
public byte[] getPdbName() {
return pdbName;
}
public void setPdbName(byte b, int index) {
this.pdbName[index] = b;
}
public static int getNameLength(DumpFileReader r, long pos) throws IOException {
int i = 0;
while (r.readNextByte() != 0)
i++;
r.setPointerIndex(pos);
return i;
}
}
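A hypothetical helper (not part of this change) illustrating how the fields parsed above are commonly combined into the GUID-plus-age string used to locate a matching PDB; the mixed-endian encoding (Data1/Data2/Data3 little-endian, Data4 byte-for-byte) is an assumption taken from the standard GUID layout, and the class and method names below are invented for illustration.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class CvRecordIdentityExample {
	// Builds the GUID-plus-age identity string from the raw signature bytes and
	// age parsed by CvRecord. Data1/Data2/Data3 are read little-endian; the
	// remaining eight bytes are appended as-is, followed by the age in hex.
	public static String toPdbIdentity(byte[] pdbSigGUID, int pdbAge) {
		ByteBuffer bb = ByteBuffer.wrap(pdbSigGUID).order(ByteOrder.LITTLE_ENDIAN);
		int data1 = bb.getInt();
		int data2 = bb.getShort() & 0xFFFF;
		int data3 = bb.getShort() & 0xFFFF;
		StringBuilder sb = new StringBuilder();
		sb.append(String.format("%08X%04X%04X", data1, data2, data3));
		for (int i = 8; i < 16; i++) {
			sb.append(String.format("%02X", pdbSigGUID[i] & 0xFF));
		}
		sb.append(Integer.toHexString(pdbAge).toUpperCase());
		return sb.toString();
	}
}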

View File

@@ -0,0 +1,117 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class Directory implements StructConverter {
public final static String NAME = "MINIDUMP_DIRECTORY";
public final static int THREAD_LIST_STREAM = 3;
public final static int MODULE_LIST_STREAM = 4;
public final static int MEMORY_LIST_STREAM = 5;
public final static int EXCEPTION_STREAM = 6;
public final static int SYSTEM_INFO_STREAM = 7;
public final static int THREAD_EX_LIST_STREAM = 8;
public final static int MEMORY64_LIST_STREAM = 9;
public final static int HANDLE_LIST_STREAM = 0xC;
public final static int UNLOADED_MODULE_LIST_STREAM = 0xE;
public final static int MISC_INFO_STREAM = 0xF;
public final static int MEMORY_INFO_LIST_STREAM = 0x10;
public final static int TOKEN_LIST_STREAM = 0x13;
private final static String[] names = {
"UnusedStream", "ReservedStream0", "ReservedStream1", "ThreadListStream",
"ModuleListStream", "MemoryListStream", "ExceptionStream", "SystemInfoStream ",
"ThreadExListStream", "Memory64ListStream", "CommentStreamA", "CommentStreamW",
"HandleDataStream", "FunctionTableStream", "UnloadedModuleListStream", "MiscInfoStream",
"MemoryInfoListStream", "ThreadInfoListStream", "HandleOperationListStream",
"TokenStream", "JavaScriptDataStream", "SystemMemoryInfoStream", "ProcessVmCountersStream",
"IptTraceStream", "ThreadNamesStream"
};
private int streamType;
private int dataSize;
private long rva;
private DumpFileReader reader;
private long index;
Directory(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setStreamType(reader.readNextInt());
setDataSize(reader.readNextInt());
setRVA(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "StreamType", null);
struct.add(DWORD, 4, "DataSize", null);
struct.add(Pointer32DataType.dataType, 4, "RVA", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public void setStreamType(int streamType) {
this.streamType = streamType;
}
public int getStreamType() {
return streamType;
}
public void setDataSize(int dataSize) {
this.dataSize = dataSize;
}
public int getDataSize() {
return dataSize;
}
public void setRVA(long rva) {
this.rva = rva;
}
public long getRVA() {
return rva;
}
public String getReadableName() {
if (streamType < 0 || streamType >= names.length) {
return "UnknownStream_0x" + Integer.toHexString(streamType);
}
return names[streamType];
}
}

View File

@@ -0,0 +1,208 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ExceptionStream implements StructConverter {
public final static String NAME = "MINIDUMP_EXCEPTION";
public final static int EXCEPTION_MAXIMUM_PARAMETERS = 15;
private int threadId;
private int exceptionCode;
private int exceptionFlags;
private long exceptionRecord;
private long exceptionAddress;
private int numberOfParameters;
private long[] exceptionInformation;
private int contextDataSize;
private int contextRVA;
private StructureDataType defaultContext;
private String[] keys1 = { "ContextFlags", "DR0", "DR1", "DR2", "DR3", "DR6", "DR7" };
private String[] keys2 = { "GS", "FS", "ES", "DS", "EDI", "ESI", "EBX", "EDX", "ECX", "EAX",
"EBP", "EIP", "CS", "eflags", "ESP", "SS" };
private DumpFileReader reader;
private long index;
ExceptionStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
getRVAs();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setThreadId(reader.readNextInt());
reader.readNextInt();
setExceptionCode(reader.readNextInt());
setExceptionFlags(reader.readNextInt());
setExceptionRecord(reader.readNextLong());
setExceptionAddress(reader.readNextLong());
setNumberOfParameters(reader.readNextInt());
reader.readNextInt();
exceptionInformation = new long[EXCEPTION_MAXIMUM_PARAMETERS];
for (int i = 0; i < EXCEPTION_MAXIMUM_PARAMETERS; i++) {
setExceptionInformation(reader.readNextLong(), i);
}
setContextDataSize(reader.readNextInt());
setContextRVA(reader.readNextInt());
}
private void getRVAs() {
long pos = reader.getPointerIndex();
reader.setPointerIndex(getContextRVA());
defaultContext =
new StructureDataType("ExceptionContext_" + Integer.toHexString(threadId), 0);
for (int i = 0; i < 7; i++) {
defaultContext.add(DWORD, keys1[i], null);
}
for (int i = 7; i < 35; i++) {
defaultContext.add(DWORD, "", null);
}
for (int i = 35; i < 51; i++) {
defaultContext.add(DWORD, keys2[i - 35], null);
}
for (int i = 51; i < 179 && i < getContextDataSize() / 4; i++) {
defaultContext.add(DWORD, "", null);
}
reader.setPointerIndex(pos);
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "ThreadId", null);
StructureDataType s0 = new StructureDataType("ExceptionRecord", 0);
s0.add(DWORD, 4, "__alignment", null);
s0.add(DWORD, 4, "ExceptionCode", null);
s0.add(DWORD, 4, "ExceptionFlags", null);
s0.add(QWORD, 8, "ExceptionRecord", null);
s0.add(QWORD, 8, "ExceptionAddress", null);
s0.add(DWORD, 4, "NumberParameters", null);
s0.add(DWORD, 4, "__unusedAlignment", null);
ArrayDataType a = new ArrayDataType(QWORD, EXCEPTION_MAXIMUM_PARAMETERS, 8);
s0.add(a, a.getLength(), "ExceptionInformation", null);
StructureDataType s1 = new StructureDataType("ThreadContext", 0);
s1.add(DWORD, 4, "DataSize", null);
s1.add(Pointer32DataType.dataType, 4, "RVA", null);
struct.add(s0, s0.getLength(), s0.getDisplayName(), null);
struct.add(s1, s1.getLength(), s1.getDisplayName(), null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public void setThreadId(int threadId) {
this.threadId = threadId;
}
public int getThreadId() {
return threadId;
}
public int getExceptionCode() {
return exceptionCode;
}
public void setExceptionCode(int exceptionCode) {
this.exceptionCode = exceptionCode;
}
public int getExceptionFlags() {
return exceptionFlags;
}
public void setExceptionFlags(int exceptionFlags) {
this.exceptionFlags = exceptionFlags;
}
public long getExceptionRecord() {
return exceptionRecord;
}
public void setExceptionRecord(long exceptionRecord) {
this.exceptionRecord = exceptionRecord;
}
public long getExceptionAddress() {
return exceptionAddress;
}
public void setExceptionAddress(long exceptionAddress) {
this.exceptionAddress = exceptionAddress;
}
public int getNumberOfParameters() {
return numberOfParameters;
}
public void setNumberOfParameters(int numberOfParameters) {
this.numberOfParameters = numberOfParameters;
}
public long getExceptionInformation(int idx) {
return exceptionInformation[idx];
}
public void setExceptionInformation(long exceptionInformation, int index) {
this.exceptionInformation[index] = exceptionInformation;
}
public void setContextDataSize(int contextDataSize) {
this.contextDataSize = contextDataSize;
}
public int getContextDataSize() {
return contextDataSize;
}
public void setContextRVA(int contextRVA) {
this.contextRVA = contextRVA;
}
public int getContextRVA() {
return contextRVA;
}
public StructureDataType getDefaultContext() {
return defaultContext;
}
}

View File

@@ -0,0 +1,113 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class FunctionTable implements StructConverter {
public final static String NAME = "MINIDUMP_FUNCTION_TABLE";
private long minimumAddress;
private long maximumAddress;
private long baseAddress;
private int entryCount;
private int sizeOfAlignPad;
private DumpFileReader reader;
private long index;
FunctionTable(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setMinimumAddress(reader.readNextLong());
setMaximumAddress(reader.readNextLong());
setBaseAddress(reader.readNextLong());
setEntryCount(reader.readNextInt());
setSizeOfAlignPad(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "MinimumAddress", null);
struct.add(QWORD, 8, "MaximumAddress", null);
struct.add(QWORD, 8, "BaseAddress", null);
struct.add(DWORD, 4, "EntryCount", null);
struct.add(DWORD, 4, "SizeOfAlignPad", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public long getMinimumAddress() {
return minimumAddress;
}
public void setMinimumAddress(long minimumAddress) {
this.minimumAddress = minimumAddress;
}
public long getMaximumAddress() {
return maximumAddress;
}
public void setMaximumAddress(long maximumAddress) {
this.maximumAddress = maximumAddress;
}
public long getBaseAddress() {
return baseAddress;
}
public void setBaseAddress(long baseAddress) {
this.baseAddress = baseAddress;
}
public int getEntryCount() {
return entryCount;
}
public void setEntryCount(int entryCount) {
this.entryCount = entryCount;
}
public int getSizeOfAlignPad() {
return sizeOfAlignPad;
}
public void setSizeOfAlignPad(int sizeOfAlignPad) {
this.sizeOfAlignPad = sizeOfAlignPad;
}
}

View File

@@ -0,0 +1,138 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class FunctionTableStream implements StructConverter {
public final static String NAME = "MINIDUMP_FUNCTION_TABLES";
private int sizeOfHeader;
private int sizeOfEntry;
private int sizeOfNativeDescriptor;
private int sizeOfFunctionEntry;
private int numberOfDescriptors;
private int sizeOfAlignPad;
private FunctionTable[] descriptors;
private DumpFileReader reader;
private long index;
FunctionTableStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSizeOfHeader(reader.readNextInt());
setSizeOfEntry(reader.readNextInt());
setSizeOfNativeDescriptor(reader.readNextInt());
setSizeOfFunctionEntry(reader.readNextInt());
setNumberOfDescriptors(reader.readNextInt());
setSizeOfAlignPad(reader.readNextInt());
descriptors = new FunctionTable[numberOfDescriptors];
for (int i = 0; i < numberOfDescriptors; i++) {
setDescriptors(new FunctionTable(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "SizeOfHeader", null);
struct.add(DWORD, 4, "SizeOfDescriptor", null);
struct.add(DWORD, 4, "SizeOfNativeDescriptor", null);
struct.add(DWORD, 4, "SizeOfFunctionEntry", null);
struct.add(DWORD, 4, "NumberOfDescriptors", null);
struct.add(DWORD, 4, "SizeOfAlignPad", null);
DataType t = descriptors[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfDescriptors, t.getLength());
struct.add(a, a.getLength(), "Descriptors", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getSizeOfHeader() {
return sizeOfHeader;
}
public void setSizeOfHeader(int sizeOfHeader) {
this.sizeOfHeader = sizeOfHeader;
}
public int getSizeOfEntry() {
return sizeOfEntry;
}
public void setSizeOfEntry(int sizeOfEntry) {
this.sizeOfEntry = sizeOfEntry;
}
public int getSizeOfNativeDescriptor() {
return sizeOfNativeDescriptor;
}
public void setSizeOfNativeDescriptor(int sizeOfNativeDescriptor) {
this.sizeOfNativeDescriptor = sizeOfNativeDescriptor;
}
public int getSizeOfFunctionEntry() {
return sizeOfFunctionEntry;
}
public void setSizeOfFunctionEntry(int sizeOfFunctionEntry) {
this.sizeOfFunctionEntry = sizeOfFunctionEntry;
}
public int getNumberOfDescriptors() {
return numberOfDescriptors;
}
public void setNumberOfDescriptors(int numberOfDescriptors) {
this.numberOfDescriptors = numberOfDescriptors;
}
public FunctionTable getDescriptors(int idx) {
return descriptors[idx];
}
public void setDescriptors(FunctionTable descriptor, int index) {
this.descriptors[index] = descriptor;
}
public void setSizeOfAlignPad(int sizeOfAlignPad) {
this.sizeOfAlignPad = sizeOfAlignPad;
}
public int getSizeOfAlignPad() {
return sizeOfAlignPad;
}
}

View File

@@ -0,0 +1,155 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class Handle implements StructConverter {
public final static String NAME = "MINIDUMP_HANDLE";
private long handle;
private int typeNameRVA;
private int objectNameRVA;
private int attributes;
private int GrantedAccess;
private int HandleCount;
private int PointerCount;
private int ObjectInfoRva;
private DumpFileReader reader;
private long index;
private int entrySize;
private boolean expandedFormat;
Handle(DumpFileReader reader, long index, int entrySize) throws IOException {
this.reader = reader;
this.index = index;
this.entrySize = entrySize;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setHandle(reader.readNextLong());
setTypeNameRVA(reader.readNextInt());
setObjectNameRVA(reader.readNextInt());
setAttributes(reader.readNextInt());
setGrantedAccess(reader.readNextInt());
setHandleCount(reader.readNextInt());
setPointerCount(reader.readNextInt());
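// A MINIDUMP_HANDLE_DESCRIPTOR is 32 bytes; MINIDUMP_HANDLE_DESCRIPTOR_2 appends
// ObjectInfoRva plus a reserved DWORD (40 bytes), so a descriptor size larger than
// what has been read so far signals the expanded format.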
expandedFormat = entrySize > reader.getPointerIndex() - index;
if (expandedFormat) {
setObjectInfoRva(reader.readNextInt());
reader.readNextInt();
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "Handle", null);
struct.add(Pointer32DataType.dataType, 4, "TypeNameRVA", null);
struct.add(Pointer32DataType.dataType, 4, "ObjectNameRVA", null);
struct.add(DWORD, 4, "Attributes", null);
struct.add(DWORD, 4, "GrantedAccess", null);
struct.add(DWORD, 4, "HandleCount", null);
struct.add(DWORD, 4, "PointerCount", null);
if (expandedFormat) {
struct.add(Pointer32DataType.dataType, 4, "ObjectInfoRva", null);
struct.add(DWORD, 4, "Reserved0", null);
}
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public long getHandle() {
return handle;
}
public void setHandle(long handle) {
this.handle = handle;
}
public int getTypeNameRVA() {
return typeNameRVA;
}
public void setTypeNameRVA(int typeNameRVA) {
this.typeNameRVA = typeNameRVA;
}
public int getObjectNameRVA() {
return objectNameRVA;
}
public void setObjectNameRVA(int objectNameRVA) {
this.objectNameRVA = objectNameRVA;
}
public int getAttributes() {
return attributes;
}
public void setAttributes(int attributes) {
this.attributes = attributes;
}
public int getGrantedAccess() {
return GrantedAccess;
}
public void setGrantedAccess(int grantedAccess) {
GrantedAccess = grantedAccess;
}
public int getHandleCount() {
return HandleCount;
}
public void setHandleCount(int handleCount) {
HandleCount = handleCount;
}
public int getPointerCount() {
return PointerCount;
}
public void setPointerCount(int pointerCount) {
PointerCount = pointerCount;
}
public int getObjectInfoRva() {
return ObjectInfoRva;
}
public void setObjectInfoRva(int objectInfoRva) {
ObjectInfoRva = objectInfoRva;
}
}

View File

@@ -0,0 +1,107 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class HandleDataStream implements StructConverter {
public final static String NAME = "MINIDUMP_HANDLE_DATA";
private int sizeOfHeader;
private int sizeOfDescriptor;
private int numberOfHandles;
private Handle[] handles;
private DumpFileReader reader;
private long index;
HandleDataStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSizeOfHeader(reader.readNextInt());
setSizeOfDescriptor(reader.readNextInt());
setNumberOfHandles(reader.readNextInt());
reader.readNextInt();
handles = new Handle[numberOfHandles];
for (int i = 0; i < numberOfHandles; i++) {
setHandle(new Handle(reader, reader.getPointerIndex(), sizeOfDescriptor), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "SizeOfHeader", null);
struct.add(DWORD, 4, "SizeOfDescriptor", null);
struct.add(DWORD, 4, "NumberOfHandles", null);
struct.add(DWORD, 4, "Reserved", null);
DataType t = handles[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfHandles, t.getLength());
struct.add(a, a.getLength(), "Handles", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getNumberOfHandles() {
return numberOfHandles;
}
public void setNumberOfHandles(int numberOfHandles) {
this.numberOfHandles = numberOfHandles;
}
public Handle getHandle(int idx) {
return handles[idx];
}
public void setHandle(Handle handle, int index) {
this.handles[index] = handle;
}
public void setSizeOfHeader(int sizeOfHeader) {
this.sizeOfHeader = sizeOfHeader;
}
public int getSizeOfHeader() {
return sizeOfHeader;
}
public void setSizeOfDescriptor(int sizeOfDescriptor) {
this.sizeOfDescriptor = sizeOfDescriptor;
}
public int getSizeOfDescriptor() {
return sizeOfDescriptor;
}
}

View File

@@ -0,0 +1,105 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class HandleOperationListStream implements StructConverter {
public final static String NAME = "MINIDUMP_HANDLE_OPERATIONS";
private int sizeOfHeader;
private int sizeOfDescriptor;
private int numberOfHandles;
private Handle[] handleOperations;
private DumpFileReader reader;
private long index;
HandleOperationListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSizeOfHeader(reader.readNextInt());
setSizeOfDescriptor(reader.readNextInt());
setNumberOfHandles(reader.readNextInt());
handleOperations = new Handle[numberOfHandles];
for (int i = 0; i < numberOfHandles; i++) {
setHandle(new Handle(reader, reader.getPointerIndex(), sizeOfDescriptor), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "SizeOfHeader", null);
struct.add(DWORD, 4, "SizeOfDescriptor", null);
struct.add(DWORD, 4, "NumberOfHandles", null);
DataType t = handleOperations[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfHandles, t.getLength());
struct.add(a, a.getLength(), "Handles", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getNumberOfHandles() {
return numberOfHandles;
}
public void setNumberOfHandles(int numberOfHandles) {
this.numberOfHandles = numberOfHandles;
}
public Handle getHandle(int idx) {
return handleOperations[idx];
}
public void setHandle(Handle handle, int index) {
this.handleOperations[index] = handle;
}
public void setSizeOfHeader(int sizeOfHeader) {
this.sizeOfHeader = sizeOfHeader;
}
public int getSizeOfHeader() {
return sizeOfHeader;
}
public void setSizeOfDescriptor(int sizeOfDescriptor) {
this.sizeOfDescriptor = sizeOfDescriptor;
}
public int getSizeOfDescriptor() {
return sizeOfDescriptor;
}
}

View File

@@ -0,0 +1,139 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MdmpFileHeader implements StructConverter {
public final static String NAME = "MINIDUMP_HEADER";
private int signature;
private int version;
private int numberOfStreams;
private long streamDirectoryRVA;
private int checkSum;
private int timeDateStamp;
private long flags;
private DumpFileReader reader;
private long index;
MdmpFileHeader(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSignature(reader.readNextInt());
setVersion(reader.readNextInt());
setNumberOfStreams(reader.readNextInt());
setStreamDirectoryRVA(reader.readNextInt());
setCheckSum(reader.readNextInt());
setTimeDateStamp(reader.readNextInt());
setFlags(reader.readNextLong());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(STRING, 4, "Signature", null);
struct.add(STRING, 4, "Version", null);
struct.add(DWORD, 4, "NumberOfStreams", null);
struct.add(Pointer32DataType.dataType, 4, "StreamDirectoryRVA", null);
struct.add(DWORD, 4, "CheckSum", null);
UnionDataType union = new UnionDataType(NAME + "_u");
union.add(DWORD, 4, "Reserved", null);
union.add(DWORD, 4, "TimeDateStamp", null);
struct.add(union, 4, union.getDisplayName(), null);
struct.add(QWORD, 8, "Flags", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public void setSignature(int signature) {
this.signature = signature;
}
public int getSignature() {
return signature;
}
public void setVersion(int version) {
this.version = version;
}
public int getVersion() {
return version;
}
public void setNumberOfStreams(int numberOfStreams) {
this.numberOfStreams = numberOfStreams;
}
public int getNumberOfStreams() {
return numberOfStreams;
}
public void setStreamDirectoryRVA(long streamDirectoryRVA) {
this.streamDirectoryRVA = streamDirectoryRVA;
}
public long getStreamDirectoryRVA() {
return streamDirectoryRVA;
}
public void setCheckSum(int checkSum) {
this.checkSum = checkSum;
}
public int getCheckSum() {
return checkSum;
}
public void setTimeDateStamp(int timeDateStamp) {
this.timeDateStamp = timeDateStamp;
}
public int getTimeDateStamp() {
return timeDateStamp;
}
public void setFlags(long flags) {
this.flags = flags;
}
public long getFlags() {
return flags;
}
}
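A minimal validation sketch (the class and method names are hypothetical, not part of this change), assuming the Windows SDK definition of MINIDUMP_HEADER: the Signature field holds the ASCII bytes "MDMP" and the low-order word of Version carries the minidump format version while the high word is implementation specific.

public class MdmpHeaderCheckExample {
	// "MDMP" read as a little-endian DWORD.
	private static final int MDMP_SIGNATURE = 0x504D444D;

	// Hypothetical helper: quick sanity check on parsed header fields; only the
	// low word of Version is examined because the high word is internal.
	public static boolean looksLikeMinidump(int signature, int version) {
		return signature == MDMP_SIGNATURE && (version & 0xFFFF) != 0;
	}
}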

View File

@@ -0,0 +1,95 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class Memory64ListStream implements StructConverter {
public final static String NAME = "MINIDUMP_MEMORY_RANGE_64_LIST";
private int numberOfMemoryRanges;
private long baseRVA;
private MemoryRange64[] memoryRanges;
private DumpFileReader reader;
private long index;
Memory64ListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setNumberOfMemoryRanges((int) reader.readNextLong());
setBaseRVA(reader.readNextLong());
memoryRanges = new MemoryRange64[numberOfMemoryRanges];
for (int i = 0; i < numberOfMemoryRanges; i++) {
setMemoryRange(new MemoryRange64(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "NumberOfMemoryRanges", null);
struct.add(POINTER, 8, "BaseRva", null);
DataType t = memoryRanges[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfMemoryRanges, t.getLength());
struct.add(a, a.getLength(), "MemoryRanges", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public long getNumberOfMemoryRanges() {
return numberOfMemoryRanges;
}
public void setNumberOfMemoryRanges(int numberOfMemoryRanges) {
this.numberOfMemoryRanges = numberOfMemoryRanges;
}
public void setBaseRVA(long baseRVA) {
this.baseRVA = baseRVA;
}
public long getBaseRVA() {
return baseRVA;
}
public MemoryRange64 getMemoryRange(int idx) {
return memoryRanges[idx];
}
public void setMemoryRange(MemoryRange64 memoryRange, int index) {
this.memoryRanges[index] = memoryRange;
}
}

View File

@@ -0,0 +1,161 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MemoryInfo implements StructConverter {
public final static String NAME = "MINIDUMP_MEMORY_INFO";
private long baseAddress;
private long allocationBase;
private int allocationProtect;
private long regionSize;
private int state;
private int protect;
private int type;
private DumpFileReader reader;
private long index;
MemoryInfo(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setBaseAddress(reader.readNextLong());
setAllocationBase(reader.readNextLong());
setAllocationProtect(reader.readNextInt());
reader.readNextInt();
setRegionSize(reader.readNextLong());
setState(reader.readNextInt());
setProtect(reader.readNextInt());
setType(reader.readNextInt());
reader.readNextInt();
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "BaseAddress", null);
struct.add(QWORD, 8, "AllocationBase", null);
struct.add(DWORD, 4, "AllocationProtect", null);
struct.add(DWORD, 4, "__alignment1", null);
struct.add(QWORD, 8, "RegionSize", null);
struct.add(DWORD, 4, "State", null);
struct.add(DWORD, 4, "Protect", null);
struct.add(DWORD, 4, "Type", null);
struct.add(DWORD, 4, "__alignment2", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public long getBaseAddress() {
return baseAddress;
}
public void setBaseAddress(long baseAddress) {
this.baseAddress = baseAddress;
}
public long getAllocationBase() {
return allocationBase;
}
public void setAllocationBase(long allocationBase) {
this.allocationBase = allocationBase;
}
public int getAllocationProtect() {
return allocationProtect;
}
public void setAllocationProtect(int allocationProtect) {
this.allocationProtect = allocationProtect;
}
public long getRegionSize() {
return regionSize;
}
public void setRegionSize(long regionSize) {
this.regionSize = regionSize;
}
public int getState() {
return state;
}
public void setState(int state) {
this.state = state;
}
public int getProtect() {
return protect;
}
public void setProtect(int protect) {
this.protect = protect;
}
public int getType() {
return type;
}
public void setType(int type) {
this.type = type;
}
public String getComment() {
String comment = "";
if ((state & 0x1000) > 0) {
comment += "COMMIT ";
}
if ((state & 0x10000) > 0) {
comment += "FREE ";
}
if ((state & 0x2000) > 0) {
comment += "RESERVE ";
}
if ((type & 0x1000000) > 0) {
comment += "IMAGE ";
}
if ((type & 0x40000) > 0) {
comment += "MAPPED ";
}
if ((type & 0x20000) > 0) {
comment += "PRIVATE ";
}
return comment;
}
}

View File

@@ -0,0 +1,106 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MemoryInfoListStream implements StructConverter {
public final static String NAME = "MINIDUMP_MEMORY_INFO_LIST";
private int sizeOfHeader;
private int sizeOfEntry;
private int numberOfEntries;
private MemoryInfo[] memoryInfo;
private DumpFileReader reader;
private long index;
MemoryInfoListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSizeOfHeader(reader.readNextInt());
setSizeOfEntry(reader.readNextInt());
setNumberOfEntries((int) reader.readNextLong());
memoryInfo = new MemoryInfo[numberOfEntries];
for (int i = 0; i < numberOfEntries; i++) {
setMemoryInfo(new MemoryInfo(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "SizeOfHeader", null);
struct.add(DWORD, 4, "SizeOfEntry", null);
struct.add(QWORD, 8, "NumberOfMemoryRanges", null);
DataType t = memoryInfo[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfEntries, t.getLength());
struct.add(a, a.getLength(), "MemoryRanges", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public void setSizeOfHeader(int sizeOfHeader) {
this.sizeOfHeader = sizeOfHeader;
}
public int getSizeOfHeader() {
return sizeOfHeader;
}
public void setSizeOfEntry(int sizeOfEntry) {
this.sizeOfEntry = sizeOfEntry;
}
public int getSizeOfEntry() {
return sizeOfEntry;
}
public void setNumberOfEntries(int numberOfEntries) {
this.numberOfEntries = numberOfEntries;
}
public long getNumberOfEntries() {
return numberOfEntries;
}
public MemoryInfo getMemoryInfo(int idx) {
return memoryInfo[idx];
}
public void setMemoryInfo(MemoryInfo memoryInfo, int index) {
this.memoryInfo[index] = memoryInfo;
}
}

View File

@@ -0,0 +1,83 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MemoryListStream implements StructConverter {
public final static String NAME = "MINIDUMP_MEMORY_RANGE_LIST";
private int numberOfMemoryRanges;
private MemoryRange[] memoryRanges;
private DumpFileReader reader;
private long index;
MemoryListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setNumberOfMemoryRanges(reader.readNextInt());
memoryRanges = new MemoryRange[numberOfMemoryRanges];
for (int i = 0; i < numberOfMemoryRanges; i++) {
setMemoryRange(new MemoryRange(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "NumberOfMemoryRanges", null);
DataType t = memoryRanges[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfMemoryRanges, t.getLength());
struct.add(a, a.getLength(), "MemoryRanges", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getNumberOfMemoryRanges() {
return numberOfMemoryRanges;
}
public void setNumberOfMemoryRanges(int numberOfMemoryRanges) {
this.numberOfMemoryRanges = numberOfMemoryRanges;
}
public MemoryRange getMemoryRange(int idx) {
return memoryRanges[idx];
}
public void setMemoryRange(MemoryRange memoryRange, int index) {
this.memoryRanges[index] = memoryRange;
}
}

View File

@@ -0,0 +1,90 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MemoryRange implements StructConverter {
public final static String NAME = "MINIDUMP_MEMORY_RANGE";
private long startOfMemoryRange;
private int dataSize;
private int RVA;
private DumpFileReader reader;
private long index;
MemoryRange(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setStartOfMemoryRange(reader.readNextLong());
setDataSize(reader.readNextInt());
setRVA(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "StartOfMemoryRange", null);
struct.add(DWORD, 4, "DataSize", null);
struct.add(Pointer32DataType.dataType, 4, "RVA", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public long getStartOfMemoryRange() {
return startOfMemoryRange;
}
public void setStartOfMemoryRange(long startOfMemoryRange) {
this.startOfMemoryRange = startOfMemoryRange;
}
public int getDataSize() {
return dataSize;
}
public void setDataSize(int dataSize) {
this.dataSize = dataSize;
}
public int getRVA() {
return RVA;
}
public void setRVA(int rva) {
RVA = rva;
}
}

View File

@@ -0,0 +1,86 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MemoryRange64 implements StructConverter {
public final static String NAME = "MINIDUMP_MEMORY_RANGE_64";
private long startOfMemoryRange;
private long dataSize;
// MemoryRange64 is used for full-memory minidumps where
// all of the raw memory is laid out sequentially at the
// end of the dump. There is no need for individual RVAs
// as the RVA is the base RVA plus the sum of the preceding
// data blocks.
private DumpFileReader reader;
private long index;
MemoryRange64(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setStartOfMemoryRange(reader.readNextLong());
setDataSize(reader.readNextLong());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "StartOfMemoryRange", null);
struct.add(QWORD, 8, "DataSize", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public long getStartOfMemoryRange() {
return startOfMemoryRange;
}
public void setStartOfMemoryRange(long startOfMemoryRange) {
this.startOfMemoryRange = startOfMemoryRange;
}
public long getDataSize() {
return dataSize;
}
public void setDataSize(long dataSize) {
this.dataSize = dataSize;
}
}
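A minimal sketch of the arithmetic described in the comment above; the helper class and method are hypothetical and not part of this change.

import ghidra.file.formats.dump.mdmp.Memory64ListStream;

public class MemoryRange64OffsetExample {
	// For full-memory dumps the raw bytes of range idx start at the stream's
	// BaseRva plus the sizes of all ranges that precede it.
	public static long fileOffsetOfRange(Memory64ListStream list, int idx) {
		long offset = list.getBaseRVA();
		for (int i = 0; i < idx; i++) {
			offset += list.getMemoryRange(i).getDataSize();
		}
		return offset;
	}
}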

View File

@@ -0,0 +1,448 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import ghidra.app.util.Option;
import ghidra.app.util.OptionUtils;
import ghidra.app.util.bin.StructConverter;
import ghidra.app.util.opinion.PeLoader;
import ghidra.file.formats.dump.*;
import ghidra.file.formats.dump.cmd.ModuleToPeHelper;
import ghidra.framework.options.Options;
import ghidra.program.model.data.*;
import ghidra.program.model.listing.Program;
import ghidra.util.Msg;
import ghidra.util.task.TaskMonitor;
public class Minidump extends DumpFile {
public static final int SIGNATURE = 0x504D444D; // "MDMP"
MdmpFileHeader header;
Directory[] dirs;
HashMap<Integer, StructConverter> streams = new HashMap<Integer, StructConverter>();
private boolean createBlocks;
public Minidump(DumpFileReader reader, ProgramBasedDataTypeManager dtm, List<Option> options,
TaskMonitor monitor) {
super(reader, dtm, options, monitor);
Options props = program.getOptions(Program.PROGRAM_INFO);
props.setString("Executable Format", PeLoader.PE_NAME);
initManagerList(null);
createBlocks =
OptionUtils.getBooleanOptionValue(DumpFileLoader.CREATE_MEMORY_BLOCKS_OPTION_NAME,
options, DumpFileLoader.CREATE_MEMORY_BLOCKS_OPTION_DEFAULT);
try {
header = new MdmpFileHeader(reader, 0L);
data.add(new DumpData(0, header.toDataType()));
dirs = new Directory[header.getNumberOfStreams()];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = new Directory(reader, reader.getPointerIndex());
}
for (Directory dir : dirs) {
long rva = dir.getRVA();
StructConverter sv = null;
switch (dir.getStreamType()) {
case 3:
sv = new ThreadListStream(reader, rva);
break;
case 4:
sv = new ModuleListStream(reader, rva);
break;
case 5:
sv = new MemoryListStream(reader, rva);
break;
case 6:
sv = new ExceptionStream(reader, rva);
break;
case 7:
sv = new SystemInfoStream(reader, rva);
break;
case 8:
sv = new ThreadExListStream(reader, rva);
break;
case 9:
sv = new Memory64ListStream(reader, rva);
break;
case 10:
sv = new CommentStreamA(reader, rva);
break;
case 11:
sv = new CommentStreamW(reader, rva);
break;
case 12:
sv = new HandleDataStream(reader, rva);
break;
case 13:
sv = new FunctionTableStream(reader, rva);
break;
case 14:
sv = new UnloadedModuleListStream(reader, rva);
break;
case 15:
sv = new MiscInfoStream(reader, rva);
break;
case 16:
sv = new MemoryInfoListStream(reader, rva);
break;
case 17:
sv = new ThreadInfoListStream(reader, rva);
break;
case 18:
sv = new HandleOperationListStream(reader, rva);
break;
case 19:
sv = new TokenListStream(reader, rva);
break;
case 20:
//sv = new JavaScriptDataStream(reader, rva);
break;
case 21:
sv = new SystemMemoryInfoStream(reader, rva);
break;
case 22:
sv = new ProcessVmCountersStream(reader, rva);
break;
case 23:
//sv = new IptTraceStream(reader, rva);
break;
case 24:
//sv = new ThreadNamesStream(reader, rva);
break;
}
if (sv != null) {
streams.put(dir.getStreamType(), sv);
}
}
buildStructures();
}
catch (Exception e) {
Msg.error(this, e.getMessage());
}
}
private void buildStructures() throws Exception {
long offset = header.toDataType().getLength();
long headerMax = offset;
DataType dt = dirs[0].toDataType();
data.add(new DumpData(offset, "DIRECTORIES", dt.getLength() * dirs.length));
for (int i = 0; i < dirs.length; ++i) {
offset = header.getStreamDirectoryRVA() + i * dt.getLength();
data.add(new DumpData(offset, dirs[i].toDataType(), "", false, false));
if (offset + dt.getLength() > headerMax) {
headerMax = offset + dt.getLength();
}
}
for (int i = 0; i < dirs.length; ++i) {
offset = dirs[i].getRVA();
if (offset > 0) {
StructConverter stream = getStreamByDir(i);
if (stream != null) {
dt = stream.toDataType();
data.add(new DumpData(offset, dt, dirs[i].getReadableName(), false, true));
if (offset + dt.getLength() > headerMax) {
headerMax = offset + dt.getLength();
}
}
}
}
// Compute upper bound for header block
StructConverter sv = getStreamByType(Directory.MEMORY_LIST_STREAM);
if (sv != null) {
MemoryListStream memstr = (MemoryListStream) sv;
for (int i = 0; i < memstr.getNumberOfMemoryRanges(); i++) {
MemoryRange memoryRange = memstr.getMemoryRange(i);
if (memoryRange.getStartOfMemoryRange() < headerMax) {
headerMax = memoryRange.getStartOfMemoryRange();
}
}
}
sv = getStreamByType(Directory.MEMORY64_LIST_STREAM);
if (sv != null) {
Memory64ListStream memstr = (Memory64ListStream) sv;
for (int i = 0; i < memstr.getNumberOfMemoryRanges(); i++) {
MemoryRange64 memoryRange = memstr.getMemoryRange(i);
if (memoryRange.getStartOfMemoryRange() < headerMax) {
headerMax = memoryRange.getStartOfMemoryRange();
}
}
}
addInteriorAddressObject("DumpHeader", 0, 0L, headerMax);
sv = getStreamByType(Directory.MODULE_LIST_STREAM);
if (sv != null) {
ModuleListStream modstr = (ModuleListStream) sv;
for (int i = 0; i < modstr.getNumberOfModules(); i++) {
Module mod = modstr.getModule(i);
offset = mod.getModuleNameRVA();
data.add(new DumpData(offset, StructConverter.DWORD, "", false, false));
int len = reader.readInt(offset);
offset += 4;
DumpData dd =
new DumpData(offset, new TerminatedUnicodeDataType(), "", false, false);
dd.setSize(len + 2);
data.add(dd);
String moduleName = reader.readUnicodeString(offset, len / 2);
addModule(moduleName, mod.getBaseOfImage(), i, mod.getSizeOfImage());
addExteriorAddressObject(moduleName, 0, mod.getBaseOfImage(), mod.getSizeOfImage());
CvRecord cvRecord = mod.getCvRecord();
offset = mod.getCvRecordRVA();
dt = cvRecord.toDataType();
data.add(new DumpData(offset, dt, "", false, false));
offset += dt.getLength();
data.add(new DumpData(offset, new StringDataType(), "", false, false));
}
}
sv = getStreamByType(Directory.UNLOADED_MODULE_LIST_STREAM);
if (sv != null) {
UnloadedModuleListStream modstr = (UnloadedModuleListStream) sv;
for (int i = 0; i < modstr.getNumberOfEntries(); i++) {
UnloadedModule mod = modstr.getEntry(i);
offset = mod.getModuleNameRVA();
data.add(new DumpData(offset, StructConverter.DWORD, "", false, false));
int len = reader.readInt(offset);
offset += 4;
DumpData dd =
new DumpData(offset, new TerminatedUnicodeDataType(), "", false, false);
dd.setSize(len + 2);
data.add(dd);
String moduleName = reader.readUnicodeString(offset, len / 2);
addModule(moduleName, mod.getBaseOfImage(), i, mod.getSizeOfImage());
addExteriorAddressObject(moduleName, 0, mod.getBaseOfImage(), mod.getSizeOfImage());
}
}
if (createBlocks) {
sv = getStreamByType(Directory.MEMORY_LIST_STREAM);
if (sv != null) {
MemoryListStream memstr = (MemoryListStream) sv;
for (int i = 0; i < memstr.getNumberOfMemoryRanges(); i++) {
MemoryRange memoryRange = memstr.getMemoryRange(i);
offset = memoryRange.getRVA();
addInteriorAddressObject(DumpFileLoader.MEMORY, memoryRange.getRVA(),
memoryRange.getStartOfMemoryRange(), memoryRange.getDataSize());
}
}
sv = getStreamByType(Directory.MEMORY64_LIST_STREAM);
if (sv != null) {
Memory64ListStream memstr = (Memory64ListStream) sv;
offset = (int) memstr.getBaseRVA();
for (int i = 0; i < memstr.getNumberOfMemoryRanges(); i++) {
MemoryRange64 memoryRange = memstr.getMemoryRange(i);
addInteriorAddressObject(DumpFileLoader.MEMORY, (int) offset,
memoryRange.getStartOfMemoryRange(), memoryRange.getDataSize());
offset += memoryRange.getDataSize();
}
}
}
sv = getStreamByType(Directory.MEMORY_INFO_LIST_STREAM);
if (sv != null) {
MemoryInfoListStream meminfostr = (MemoryInfoListStream) sv;
for (int i = 0; i < meminfostr.getNumberOfEntries(); i++) {
MemoryInfo memoryInfo = meminfostr.getMemoryInfo(i);
DumpAddressObject dao = getInteriorAddressObject(memoryInfo.getBaseAddress());
if (dao != null) {
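// These masks are unions of the Win32 PAGE_* protections: readable =
// PAGE_READONLY | PAGE_READWRITE | PAGE_EXECUTE_READ | PAGE_EXECUTE_READWRITE (0x66),
// writable = PAGE_READWRITE | PAGE_WRITECOPY | PAGE_EXECUTE_READWRITE |
// PAGE_EXECUTE_WRITECOPY (0xCC), executable = any PAGE_EXECUTE* flavor (0xF0).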
dao.setRead((memoryInfo.getProtect() & 0x66) > 0);
dao.setWrite((memoryInfo.getProtect() & 0xCC) > 0);
dao.setExec((memoryInfo.getProtect() & 0xF0) > 0);
dao.setComment(memoryInfo.getComment());
}
}
}
sv = getStreamByType(Directory.THREAD_LIST_STREAM);
if (sv != null) {
ThreadListStream tstr = (ThreadListStream) sv;
for (int i = 0; i < tstr.getNumberOfThreads(); i++) {
Thread t = tstr.getThread(i);
String tid = Integer.toHexString(t.getThreadId());
offset = t.getContextRVA();
if (offset != 0) {
if (contextOffset == 0) {
contextOffset = offset;
}
CategoryPath path = new CategoryPath("/winnt.h");
dt = getTypeFromArchive(path, "CONTEXT");
if (dt != null) {
data.add(new DumpData(offset, dt, "ThreadContext_" + tid, false, true));
setProgramContext(offset, dt, tid);
}
}
offset = t.getStackRVA();
if (createBlocks && offset != 0) {
addInteriorAddressObject("ThreadStack_" + tid, (int) offset,
t.getStackStartOfMemoryRange(), t.getStackDataSize());
}
}
}
sv = getStreamByType(Directory.THREAD_EX_LIST_STREAM);
if (sv != null) {
ThreadExListStream tstr = (ThreadExListStream) sv;
for (int i = 0; i < tstr.getNumberOfThreads(); i++) {
ThreadEx t = tstr.getThread(i);
String tid = Integer.toHexString(t.getThreadId());
offset = t.getContextRVA();
if (offset != 0) {
CategoryPath path = new CategoryPath("/winnt.h");
dt = getTypeFromArchive(path, "CONTEXT");
if (dt != null) {
data.add(new DumpData(offset, dt, "ThreadContext_" + tid, false, true));
setProgramContext(offset, dt, tid);
}
}
offset = t.getStackRVA();
if (createBlocks && offset != 0) {
addInteriorAddressObject("ThreadStack_" + tid, (int) offset,
t.getStackStartOfMemoryRange(), t.getStackDataSize());
}
}
}
sv = getStreamByType(Directory.HANDLE_LIST_STREAM);
if (sv != null) {
HandleDataStream handles = (HandleDataStream) sv;
for (int i = 0; i < handles.getNumberOfHandles(); i++) {
Handle handle = handles.getHandle(i);
offset = handle.getTypeNameRVA();
if (offset > 0) {
data.add(new DumpData(offset, StructConverter.DWORD, "", false, false));
int len = reader.readInt(offset);
offset += 4;
DumpData ddType =
new DumpData(offset, new TerminatedUnicodeDataType(), "", false, false);
ddType.setSize(len + 2);
data.add(ddType);
}
offset = handle.getObjectNameRVA();
if (offset > 0) {
data.add(new DumpData(offset, StructConverter.DWORD, "", false, false));
int len = reader.readInt(offset);
offset += 4;
DumpData ddObj =
new DumpData(offset, new TerminatedUnicodeDataType(), "", false, false);
ddObj.setSize(len + 2);
data.add(ddObj);
}
}
}
sv = getStreamByType(Directory.SYSTEM_INFO_STREAM);
if (sv != null) {
SystemInfoStream sistr = (SystemInfoStream) sv;
offset = sistr.getCSDVersionRVA();
data.add(new DumpData(offset, StructConverter.DWORD, "", false, false));
int len = reader.readInt(offset);
offset += 4;
DumpData dd =
new DumpData(offset, new TerminatedUnicodeDataType(), "", false, false);
dd.setSize(len + 2);
data.add(dd);
}
sv = getStreamByType(Directory.MISC_INFO_STREAM);
if (sv != null) {
MiscInfoStream mistr = (MiscInfoStream) sv;
processId = mistr.getProcessId();
addProcess(processId, "TARGET", 0);
}
sv = getStreamByType(Directory.EXCEPTION_STREAM);
if (sv != null) {
ExceptionStream xstr = (ExceptionStream) sv;
offset = xstr.getContextRVA();
contextOffset = offset;
dt = xstr.getDefaultContext();
data.add(new DumpData(offset, dt));
threadId = xstr.getThreadId();
addThread(processId, threadId, 0);
}
}
public MdmpFileHeader getFileHeader() {
return header;
}
public Directory[] getDirectories() {
return dirs;
}
public StructConverter getStreamByType(int type) {
return streams.get(type);
}
public StructConverter getStreamByDir(int dirIndex) {
return streams.get(dirs[dirIndex].getStreamType());
}
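/**
 * Reads only the dump header and stream directory, locates the SystemInfo
 * stream, and returns its processor architecture as a decimal string.
 */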
public static String getMachineType(DumpFileReader reader) throws IOException {
MdmpFileHeader header = new MdmpFileHeader(reader, 0L);
Directory[] dirs = new Directory[header.getNumberOfStreams()];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = new Directory(reader, reader.getPointerIndex());
}
for (Directory dir : dirs) {
long rva = dir.getRVA();
switch (dir.getStreamType()) {
case 7: // Directory.SYSTEM_INFO_STREAM
SystemInfoStream sv = new SystemInfoStream(reader, rva);
return Integer.toString(sv.getProcessorArchitecture());
}
}
return "0";
}
@Override
public void analyze(TaskMonitor monitor) {
boolean analyzeEmbeddedObjects =
OptionUtils.getBooleanOptionValue(DumpFileLoader.ANALYZE_EMBEDDED_OBJECTS_OPTION_NAME,
options,
false);
if (analyzeEmbeddedObjects) {
ModuleToPeHelper.queryModules(program, monitor);
}
}
}

View File

@@ -0,0 +1,355 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MiscInfoStream implements StructConverter {
public final static String NAME = "MINIDUMP_MISC_INFO";
private int sizeOfInfo;
private int flags1;
private int processId;
private int processCreateTime;
private int processUserTime;
private int processKernelTime;
private int processorMaxMhz;
private int processorCurrentMhz;
private int processorMhzLimit;
private int processorMaxIdleState;
private int processorCurrentIdleState;
private int processIntegrityLevel;
private int processExecuteFlags;
private int protectedProcess;
private int timeZoneId;
private int bias;
private String standardName; //[32]
//SYSTEMTIME standardDate;
private int standardBias;
private String daylightName; //[32]
//SYSTEMTIME daylightDate;
private int daylightBias;
/*
WORD wYear;
WORD wMonth;
WORD wDayOfWeek;
WORD wDay;
WORD wHour;
WORD wMinute;
WORD wSecond;
WORD wMilliseconds;
*/
private String buildString;
private String dbgBuildStr;
private DumpFileReader reader;
private long index;
private boolean format2, format3, format4;
MiscInfoStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSizeOfInfo(reader.readNextInt());
setFlags1(reader.readNextInt());
setProcessId(reader.readNextInt());
setProcessCreateTime(reader.readNextInt());
setProcessUserTime(reader.readNextInt());
setProcessKernelTime(reader.readNextInt());
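// Later MINIDUMP_MISC_INFO revisions are detected by whether SizeOfInfo extends past the bytes consumed so far.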
format2 = sizeOfInfo > reader.getPointerIndex() - index;
if (format2) {
setProcessorMaxMhz(reader.readNextInt());
setProcessorCurrentMhz(reader.readNextInt());
setProcessorMhzLimit(reader.readNextInt());
setProcessorMaxIdleState(reader.readNextInt());
setProcessorCurrentIdleState(reader.readNextInt());
}
format3 = sizeOfInfo > reader.getPointerIndex() - index;
if (format3) {
setProcessIntegrityLevel(reader.readNextInt());
setProcessExecuteFlags(reader.readNextInt());
setProtectedProcess(reader.readNextInt());
setTimeZoneId(reader.readNextInt());
setBias(reader.readNextInt());
setStandardName(reader.readNextUnicodeString());
reader.readNextLong(); // SYSTEMTIME StandardDate (two 8-byte reads)
reader.readNextLong();
setStandardBias(reader.readNextInt());
setDaylightName(reader.readNextUnicodeString());
reader.readNextLong(); // SYSTEMTIME DaylightDate (two 8-byte reads)
reader.readNextLong();
setDaylightBias(reader.readNextInt());
}
format4 = sizeOfInfo > reader.getPointerIndex() - index;
if (format4) {
setBuildString(reader.readNextUnicodeString());
setDbgBuildStr(reader.readNextUnicodeString());
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "SizeOfInfo", null);
struct.add(DWORD, 4, "Flags1", null);
struct.add(DWORD, 4, "ProcessId", null);
struct.add(DWORD, 4, "ProcessCreateTime", null);
struct.add(DWORD, 4, "ProcessUserTime", null);
struct.add(DWORD, 4, "ProcessKernelTime", null);
if (format2) {
struct.add(DWORD, 4, "ProcessorMaxMhz", null);
struct.add(DWORD, 4, "ProcessorCurrentMhz", null);
struct.add(DWORD, 4, "ProcessorMhzLimit", null);
struct.add(DWORD, 4, "ProcessorMaxIdleState", null);
struct.add(DWORD, 4, "ProcessorCurrentIdleState", null);
}
if (format3) {
struct.add(DWORD, 4, "ProcessIntegrityLevel", null);
struct.add(DWORD, 4, "ProcessExecuteFlags", null);
struct.add(DWORD, 4, "ProtectedProcess", null);
struct.add(DWORD, 4, "TimeZoneId", null);
StructureDataType s00 = new StructureDataType("SYSTEM_TIME", 0);
s00.add(WORD, 2, "Year", null);
s00.add(WORD, 2, "Month", null);
s00.add(WORD, 2, "DayOfWeek", null);
s00.add(WORD, 2, "Day", null);
s00.add(WORD, 2, "Hour", null);
s00.add(WORD, 2, "Minute", null);
s00.add(WORD, 2, "Second", null);
s00.add(WORD, 2, "Milliseconds", null);
StructureDataType s0 = new StructureDataType("TIME_ZONE_INFORMATION", 0);
s0.add(DWORD, 4, "Bias", null);
s0.add(UTF16, 64, "StandardName", null);
s0.add(s00, s00.getLength(), "StandardDate", null);
s0.add(DWORD, 4, "StandardBias", null);
s0.add(UTF16, 64, "DaylightName", null);
s0.add(s00, s00.getLength(), "DaylightDate", null);
s0.add(DWORD, 4, "DaylightBias", null);
struct.add(s0, s0.getLength(), "TimeZone", null);
}
if (format4) {
struct.add(UTF16, 256, "BuildString", null);
struct.add(UTF16, 40, "DbgBldStr", null);
}
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getSizeOfInfo() {
return sizeOfInfo;
}
public void setSizeOfInfo(int sizeOfInfo) {
this.sizeOfInfo = sizeOfInfo;
}
public int getFlags1() {
return flags1;
}
public void setFlags1(int flags1) {
this.flags1 = flags1;
}
public int getProcessId() {
return processId;
}
public void setProcessId(int processId) {
this.processId = processId;
}
public int getProcessCreateTime() {
return processCreateTime;
}
public void setProcessCreateTime(int processCreateTime) {
this.processCreateTime = processCreateTime;
}
public int getProcessUserTime() {
return processUserTime;
}
public void setProcessUserTime(int processUserTime) {
this.processUserTime = processUserTime;
}
public int getProcessKernelTime() {
return processKernelTime;
}
public void setProcessKernelTime(int processKernelTime) {
this.processKernelTime = processKernelTime;
}
public int getProcessorMaxMhz() {
return processorMaxMhz;
}
public void setProcessorMaxMhz(int processorMaxMhz) {
this.processorMaxMhz = processorMaxMhz;
}
public int getProcessorCurrentMhz() {
return processorCurrentMhz;
}
public void setProcessorCurrentMhz(int processorCurrentMhz) {
this.processorCurrentMhz = processorCurrentMhz;
}
public int getProcessorMhzLimit() {
return processorMhzLimit;
}
public void setProcessorMhzLimit(int processorMhzLimit) {
this.processorMhzLimit = processorMhzLimit;
}
public int getProcessorMaxIdleState() {
return processorMaxIdleState;
}
public void setProcessorMaxIdleState(int processorMaxIdleState) {
this.processorMaxIdleState = processorMaxIdleState;
}
public int getProcessorCurrentIdleState() {
return processorCurrentIdleState;
}
public void setProcessorCurrentIdleState(int processorCurrentIdleState) {
this.processorCurrentIdleState = processorCurrentIdleState;
}
public int getProcessIntegrityLevel() {
return processIntegrityLevel;
}
public void setProcessIntegrityLevel(int processIntegrityLevel) {
this.processIntegrityLevel = processIntegrityLevel;
}
public int getProcessExecuteFlags() {
return processExecuteFlags;
}
public void setProcessExecuteFlags(int processExecuteFlags) {
this.processExecuteFlags = processExecuteFlags;
}
public int getProtectedProcess() {
return protectedProcess;
}
public void setProtectedProcess(int protectedProcess) {
this.protectedProcess = protectedProcess;
}
public int getTimeZoneId() {
return timeZoneId;
}
public void setTimeZoneId(int timeZoneId) {
this.timeZoneId = timeZoneId;
}
public int getBias() {
return bias;
}
public void setBias(int bias) {
this.bias = bias;
}
public String getStandardName() {
return standardName;
}
public void setStandardName(String standardName) {
this.standardName = standardName;
}
public int getStandardBias() {
return standardBias;
}
public void setStandardBias(int standardBias) {
this.standardBias = standardBias;
}
public String getDaylightName() {
return daylightName;
}
public void setDaylightName(String daylightName) {
this.daylightName = daylightName;
}
public int getDaylightBias() {
return daylightBias;
}
public void setDaylightBias(int daylightBias) {
this.daylightBias = daylightBias;
}
public String getBuildString() {
return buildString;
}
public void setBuildString(String buildString) {
this.buildString = buildString;
}
public String getDbgBuildStr() {
return dbgBuildStr;
}
public void setDbgBuildStr(String dbgBuildStr) {
this.dbgBuildStr = dbgBuildStr;
}
}

View File

@@ -0,0 +1,347 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class Module implements StructConverter {
public final static String NAME = "MINIDUMP_MODULE";
private long baseOfImage;
private int sizeOfImage;
private int checkSum;
private int timeDateStamp;
private int moduleNameRVA;
private int dwSignature; /* e.g. 0xfeef04bd */
private int dwStrucVersion; /* e.g. 0x00000042 = "0.42" */
private int dwFileVersionMS; /* e.g. 0x00030075 = "3.75" */
private int dwFileVersionLS; /* e.g. 0x00000031 = "0.31" */
private int dwProductVersionMS; /* e.g. 0x00030010 = "3.10" */
private int dwProductVersionLS; /* e.g. 0x00000031 = "0.31" */
private int dwFileFlagsMask; /* = 0x3F for version "0.42" */
private int dwFileFlags; /* e.g. VFF_DEBUG | VFF_PRERELEASE */
private int dwFileOS; /* e.g. VOS_DOS_WINDOWS16 */
private int dwFileType; /* e.g. VFT_DRIVER */
private int dwFileSubtype; /* e.g. VFT2_DRV_KEYBOARD */
private int dwFileDateMS; /* e.g. 0 */
private int dwFileDateLS; /* e.g. 0 */
private int cvRecordDataSize;
private int cvRecordRVA;
private int miscRecordDataSize;
private int miscRecordRVA;
private int moduleNameLength;
private String moduleName;
private CvRecord cvRecord;
private DumpFileReader reader;
private long index;
Module(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
getRVAs();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setBaseOfImage(reader.readNextLong());
setSizeOfImage(reader.readNextInt());
setCheckSum(reader.readNextInt());
setTimeDateStamp(reader.readNextInt());
setModuleNameRVA(reader.readNextInt());
setDwSignature(reader.readNextInt());
setDwStrucVersion(reader.readNextInt());
setDwFileVersionMS(reader.readNextInt());
setDwFileVersionLS(reader.readNextInt());
setDwProductVersionMS(reader.readNextInt());
setDwProductVersionLS(reader.readNextInt());
setDwFileFlagsMask(reader.readNextInt());
setDwFileFlags(reader.readNextInt());
setDwFileOS(reader.readNextInt());
setDwFileType(reader.readNextInt());
setDwFileSubtype(reader.readNextInt());
setDwFileDateMS(reader.readNextInt());
setDwFileDateLS(reader.readNextInt());
setCvRecordDataSize(reader.readNextInt());
setCvRecordRVA(reader.readNextInt());
setMiscRecordDataSize(reader.readNextInt());
setMiscRecordRVA(reader.readNextInt());
reader.readNextLong(); // Reserved0
reader.readNextLong(); // Reserved1
}
private void getRVAs() throws IOException {
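// Follow the module-name and CodeView-record RVAs, then restore the reader position.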
long pos = reader.getPointerIndex();
reader.setPointerIndex(getModuleNameRVA());
moduleNameLength = reader.readNextInt();
moduleName = reader.readNextUnicodeString();
cvRecord = new CvRecord(reader, getCvRecordRVA());
reader.setPointerIndex(pos);
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "BaseOfImage", null);
struct.add(DWORD, 4, "SizeOfImage", null);
struct.add(DWORD, 4, "CheckSum", null);
struct.add(DWORD, 4, "TimeDateStamp", null);
struct.add(Pointer32DataType.dataType, 4, "ModuleNameRVA", null);
StructureDataType sv = new StructureDataType("VersionInfo", 0);
sv.add(DWORD, 4, "Signature", null);
sv.add(DWORD, 4, "StrucVersion", null);
sv.add(DWORD, 4, "FileVersionMS", null);
sv.add(DWORD, 4, "FileVersionLS", null);
sv.add(DWORD, 4, "ProductVersionMS", null);
sv.add(DWORD, 4, "ProductVersionLS", null);
sv.add(DWORD, 4, "FileFlagsMask", null);
sv.add(DWORD, 4, "FileFlags", null);
sv.add(DWORD, 4, "FileOS", null);
sv.add(DWORD, 4, "FileType", null);
sv.add(DWORD, 4, "FileSubtype", null);
sv.add(DWORD, 4, "FileDateMS", null);
sv.add(DWORD, 4, "FileDateLS", null);
StructureDataType s0 = new StructureDataType("CvRecord", 0);
s0.add(DWORD, 4, "DataSize", null);
s0.add(Pointer32DataType.dataType, 4, "RVA", null);
StructureDataType s1 = new StructureDataType("MiscRecord", 0);
s1.add(DWORD, 4, "DataSize", null);
s1.add(Pointer32DataType.dataType, 4, "RVA", null);
struct.add(sv, sv.getLength(), sv.getDisplayName(), null);
struct.add(s0, s0.getLength(), s0.getDisplayName(), null);
struct.add(s1, s1.getLength(), s1.getDisplayName(), null);
struct.add(QWORD, 8, "Reserved0", null);
struct.add(QWORD, 8, "Reserved1", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public long getBaseOfImage() {
return baseOfImage;
}
public void setBaseOfImage(long baseOfImage) {
this.baseOfImage = baseOfImage;
}
public int getSizeOfImage() {
return sizeOfImage;
}
public void setSizeOfImage(int sizeOfImage) {
this.sizeOfImage = sizeOfImage;
}
public int getCheckSum() {
return checkSum;
}
public void setCheckSum(int checkSum) {
this.checkSum = checkSum;
}
public int getTimeDateStamp() {
return timeDateStamp;
}
public void setTimeDateStamp(int timeDateStamp) {
this.timeDateStamp = timeDateStamp;
}
public int getModuleNameRVA() {
return moduleNameRVA;
}
public void setModuleNameRVA(int moduleNameRVA) {
this.moduleNameRVA = moduleNameRVA;
}
public int getDwSignature() {
return dwSignature;
}
public void setDwSignature(int dwSignature) {
this.dwSignature = dwSignature;
}
public int getDwStrucVersion() {
return dwStrucVersion;
}
public void setDwStrucVersion(int dwStrucVersion) {
this.dwStrucVersion = dwStrucVersion;
}
public int getDwFileVersionMS() {
return dwFileVersionMS;
}
public void setDwFileVersionMS(int dwFileVersionMS) {
this.dwFileVersionMS = dwFileVersionMS;
}
public int getDwFileVersionLS() {
return dwFileVersionLS;
}
public void setDwFileVersionLS(int dwFileVersionLS) {
this.dwFileVersionLS = dwFileVersionLS;
}
public int getDwProductVersionMS() {
return dwProductVersionMS;
}
public void setDwProductVersionMS(int dwProductVersionMS) {
this.dwProductVersionMS = dwProductVersionMS;
}
public int getDwProductVersionLS() {
return dwProductVersionLS;
}
public void setDwProductVersionLS(int dwProductVersionLS) {
this.dwProductVersionLS = dwProductVersionLS;
}
public int getDwFileFlagsMask() {
return dwFileFlagsMask;
}
public void setDwFileFlagsMask(int dwFileFlagsMask) {
this.dwFileFlagsMask = dwFileFlagsMask;
}
public int getDwFileFlags() {
return dwFileFlags;
}
public void setDwFileFlags(int dwFileFlags) {
this.dwFileFlags = dwFileFlags;
}
public int getDwFileOS() {
return dwFileOS;
}
public void setDwFileOS(int dwFileOS) {
this.dwFileOS = dwFileOS;
}
public int getDwFileType() {
return dwFileType;
}
public void setDwFileType(int dwFileType) {
this.dwFileType = dwFileType;
}
public int getDwFileSubtype() {
return dwFileSubtype;
}
public void setDwFileSubtype(int dwFileSubtype) {
this.dwFileSubtype = dwFileSubtype;
}
public int getDwFileDateMS() {
return dwFileDateMS;
}
public void setDwFileDateMS(int dwFileDateMS) {
this.dwFileDateMS = dwFileDateMS;
}
public int getDwFileDateLS() {
return dwFileDateLS;
}
public void setDwFileDateLS(int dwFileDateLS) {
this.dwFileDateLS = dwFileDateLS;
}
public int getCvRecordDataSize() {
return cvRecordDataSize;
}
public void setCvRecordDataSize(int cvRecordDataSize) {
this.cvRecordDataSize = cvRecordDataSize;
}
public int getCvRecordRVA() {
return cvRecordRVA;
}
public void setCvRecordRVA(int cvRecordRVA) {
this.cvRecordRVA = cvRecordRVA;
}
public int getMiscRecordDataSize() {
return miscRecordDataSize;
}
public void setMiscRecordDataSize(int miscRecordDataSize) {
this.miscRecordDataSize = miscRecordDataSize;
}
public int getMiscRecordRVA() {
return miscRecordRVA;
}
public void setMiscRecordRVA(int miscRecordRVA) {
this.miscRecordRVA = miscRecordRVA;
}
public int getModuleNameLength() {
return moduleNameLength;
}
public String getModuleName() {
return moduleName;
}
public CvRecord getCvRecord() {
return cvRecord;
}
}

View File

@@ -0,0 +1,83 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ModuleListStream implements StructConverter {
public final static String NAME = "MINIDUMP_MODULE_LIST";
private int numberOfModules;
private Module[] modules;
private DumpFileReader reader;
private long index;
ModuleListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setNumberOfModules(reader.readNextInt());
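// The MINIDUMP_MODULE entries immediately follow the count.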
modules = new Module[numberOfModules];
for (int i = 0; i < numberOfModules; i++) {
setModule(new Module(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "NumberOfModules", null);
DataType t = modules[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfModules, t.getLength());
struct.add(a, a.getLength(), "Modules", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getNumberOfModules() {
return numberOfModules;
}
public void setNumberOfModules(int numberOfModules) {
this.numberOfModules = numberOfModules;
}
public Module getModule(int idx) {
return modules[idx];
}
public void setModule(Module module, int index) {
this.modules[index] = module;
}
}

View File

@@ -0,0 +1,303 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ProcessVmCountersStream implements StructConverter {
public final static String NAME = "MINIDUMP_PROCESS_VM_COUNTERS";
private short revision;
private short flags;
private int pageFaultCount;
private long peakWorkingSetSize;
private long workingSetSize;
private long quotaPeakPagedPoolUsage;
private long quotaPagedPoolUsage;
private long quotaPeakNonPagedPoolUsage;
private long quotaNonPagedPoolUsage;
private long pagefileUsage;
private long peakPagefileUsage;
private long peakVirtualSize; // VIRTUALSIZE
private long virtualSize; // VIRTUALSIZE
private long privateUsage; // EX+
private long privateWorkingSetSize; // EX2+
private long sharedCommitUsage; // EX2+
private long jobSharedCommitUsage; // JOB+
private long jobPrivateCommitUsage; // JOB+
private long jobPeakPrivateCommitUsage; // JOB+
private long jobPrivateCommitLimit; // JOB+
private long jobTotalCommitLimit; // JOB+
private DumpFileReader reader;
private long index;
private boolean expandedFormat;
ProcessVmCountersStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setRevision(reader.readNextShort());
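// Revision 1 is the basic counters layout; later revisions add the Flags word and the EX/EX2/JOB fields below.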
expandedFormat = getRevision() > 1;
if (expandedFormat) {
setFlags(reader.readNextShort());
}
setPageFaultCount(reader.readNextInt());
setPeakWorkingSetSize(reader.readNextLong());
setWorkingSetSize(reader.readNextLong());
setQuotaPeakPagedPoolUsage(reader.readNextLong());
setQuotaPagedPoolUsage(reader.readNextLong());
setQuotaPeakNonPagedPoolUsage(reader.readNextLong());
setQuotaNonPagedPoolUsage(reader.readNextLong());
setPagefileUsage(reader.readNextLong());
setPeakPagefileUsage(reader.readNextLong());
if (expandedFormat) {
setPeakVirtualSize(reader.readNextLong());
setVirtualSize(reader.readNextLong());
}
setPrivateUsage(reader.readNextLong());
if (expandedFormat) {
setPrivateWorkingSetSize(reader.readNextLong());
setSharedCommitUsage(reader.readNextLong());
setJobSharedCommitUsage(reader.readNextLong());
setJobPrivateCommitUsage(reader.readNextLong());
setJobPeakPrivateCommitUsage(reader.readNextLong());
setJobPrivateCommitLimit(reader.readNextLong());
setJobTotalCommitLimit(reader.readNextLong());
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(WORD, 2, "Revision", null);
if (expandedFormat) {
struct.add(WORD, 2, "Flags", null);
}
struct.add(DWORD, 4, "PageFaultCount", null);
struct.add(QWORD, 8, "PeakWorkingSetSize", null);
struct.add(QWORD, 8, "WorkingSetSize", null);
struct.add(QWORD, 8, "QuotaPeakPagedPoolUsage", null);
struct.add(QWORD, 8, "QuotaPagedPoolUsage", null);
struct.add(QWORD, 8, "QuotaPeakNonPagedPoolUsage", null);
struct.add(QWORD, 8, "QuotaNonPagedPoolUsage", null);
struct.add(QWORD, 8, "PagefileUsage", null);
struct.add(QWORD, 8, "PeakPagefileUsage", null);
if (expandedFormat) {
struct.add(QWORD, 8, "PeakVirtualSize", null);
struct.add(QWORD, 8, "VirtualSize", null);
}
struct.add(QWORD, 8, "PrivateUsage", null);
if (expandedFormat) {
struct.add(QWORD, 8, "PrivateWorkingSetSize", null);
struct.add(QWORD, 8, "SharedCommitUsage", null);
struct.add(QWORD, 8, "JobSharedCommitUsage", null);
struct.add(QWORD, 8, "JobPrivateCommitUsage", null);
struct.add(QWORD, 8, "JobPeakPrivateCommitUsage", null);
struct.add(QWORD, 8, "JobPrivateCommitLimit", null);
struct.add(QWORD, 8, "JobTotalCommitLimit", null);
}
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public short getRevision() {
return revision;
}
public void setRevision(short revision) {
this.revision = revision;
}
public short getFlags() {
return flags;
}
public void setFlags(short flags) {
this.flags = flags;
}
public int getPageFaultCount() {
return pageFaultCount;
}
public void setPageFaultCount(int pageFaultCount) {
this.pageFaultCount = pageFaultCount;
}
public long getPeakWorkingSetSize() {
return peakWorkingSetSize;
}
public void setPeakWorkingSetSize(long peakWorkingSetSize) {
this.peakWorkingSetSize = peakWorkingSetSize;
}
public long getWorkingSetSize() {
return workingSetSize;
}
public void setWorkingSetSize(long workingSetSize) {
this.workingSetSize = workingSetSize;
}
public long getQuotaPeakPagedPoolUsage() {
return quotaPeakPagedPoolUsage;
}
public void setQuotaPeakPagedPoolUsage(long quotaPeakPagedPoolUsage) {
this.quotaPeakPagedPoolUsage = quotaPeakPagedPoolUsage;
}
public long getQuotaPagedPoolUsage() {
return quotaPagedPoolUsage;
}
public void setQuotaPagedPoolUsage(long quotaPagedPoolUsage) {
this.quotaPagedPoolUsage = quotaPagedPoolUsage;
}
public long getQuotaPeakNonPagedPoolUsage() {
return quotaPeakNonPagedPoolUsage;
}
public void setQuotaPeakNonPagedPoolUsage(long quotaPeakNonPagedPoolUsage) {
this.quotaPeakNonPagedPoolUsage = quotaPeakNonPagedPoolUsage;
}
public long getQuotaNonPagedPoolUsage() {
return quotaNonPagedPoolUsage;
}
public void setQuotaNonPagedPoolUsage(long quotaNonPagedPoolUsage) {
this.quotaNonPagedPoolUsage = quotaNonPagedPoolUsage;
}
public long getPagefileUsage() {
return pagefileUsage;
}
public void setPagefileUsage(long pagefileUsage) {
this.pagefileUsage = pagefileUsage;
}
public long getPeakPagefileUsage() {
return peakPagefileUsage;
}
public void setPeakPagefileUsage(long peakPagefileUsage) {
this.peakPagefileUsage = peakPagefileUsage;
}
public long getPeakVirtualSize() {
return peakVirtualSize;
}
public void setPeakVirtualSize(long peakVirtualSize) {
this.peakVirtualSize = peakVirtualSize;
}
public long getVirtualSize() {
return virtualSize;
}
public void setVirtualSize(long virtualSize) {
this.virtualSize = virtualSize;
}
public long getPrivateUsage() {
return privateUsage;
}
public void setPrivateUsage(long privateUsage) {
this.privateUsage = privateUsage;
}
public long getPrivateWorkingSetSize() {
return privateWorkingSetSize;
}
public void setPrivateWorkingSetSize(long privateWorkingSetSize) {
this.privateWorkingSetSize = privateWorkingSetSize;
}
public long getSharedCommitUsage() {
return sharedCommitUsage;
}
public void setSharedCommitUsage(long sharedCommitUsage) {
this.sharedCommitUsage = sharedCommitUsage;
}
public long getJobSharedCommitUsage() {
return jobSharedCommitUsage;
}
public void setJobSharedCommitUsage(long jobSharedCommitUsage) {
this.jobSharedCommitUsage = jobSharedCommitUsage;
}
public long getJobPrivateCommitUsage() {
return jobPrivateCommitUsage;
}
public void setJobPrivateCommitUsage(long jobPrivateCommitUsage) {
this.jobPrivateCommitUsage = jobPrivateCommitUsage;
}
public long getJobPeakPrivateCommitUsage() {
return jobPeakPrivateCommitUsage;
}
public void setJobPeakPrivateCommitUsage(long jobPeakPrivateCommitUsage) {
this.jobPeakPrivateCommitUsage = jobPeakPrivateCommitUsage;
}
public long getJobPrivateCommitLimit() {
return jobPrivateCommitLimit;
}
public void setJobPrivateCommitLimit(long jobPrivateCommitLimit) {
this.jobPrivateCommitLimit = jobPrivateCommitLimit;
}
public long getJobTotalCommitLimit() {
return jobTotalCommitLimit;
}
public void setJobTotalCommitLimit(long jobTotalCommitLimit) {
this.jobTotalCommitLimit = jobTotalCommitLimit;
}
}

View File

@@ -0,0 +1,275 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class SystemInfoStream implements StructConverter {
public final static String NAME = "MINIDUMP_SYSTEM_INFO";
private short processorArchitecture;
private short processorLevel;
private short processorRevision;
private byte numberOfProcessors;
private byte productType;
private int majorVersion;
private int minorVersion;
private int buildNumber;
private int platformId;
private int rva;
private short suiteMask;
private int[] vendorId = new int[3];
private int versionInformation;
private int featureInformation;
private int AMDExtendedCpuFeatures;
private int csdNameLength;
private String csdName;
private DumpFileReader reader;
private long index;
SystemInfoStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
getRVAs();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setProcessorArchitecture(reader.readNextShort());
setProcessorLevel(reader.readNextShort());
setProcessorRevision(reader.readNextShort());
setNumberOfProcessors(reader.readNextByte());
setProductType(reader.readNextByte());
setMajorVersion(reader.readNextInt());
setMinorVersion(reader.readNextInt());
setBuildNumber(reader.readNextInt());
setPlatformId(reader.readNextInt());
setCSDVersionRVA(reader.readNextInt());
setSuiteMask(reader.readNextShort());
reader.readNextShort(); // reserved/padding word after SuiteMask
for (int i = 0; i < 3; i++) {
setVendorId(reader.readNextInt(), i);
}
setVersionInformation(reader.readNextInt());
setFeatureInformation(reader.readNextInt());
setAMDExtendedCpuFeatures(reader.readNextInt());
}
private void getRVAs() throws IOException {
long pos = reader.getPointerIndex();
reader.setPointerIndex(getCSDVersionRVA());
csdNameLength = reader.readNextInt();
csdName = reader.readNextUnicodeString();
reader.setPointerIndex(pos);
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(WORD, 2, "ProcessorArchitecture", null);
struct.add(WORD, 2, "ProcessorLevel", null);
struct.add(WORD, 2, "ProcessorRevision", null);
UnionDataType u0 = new UnionDataType(NAME + "_u0");
u0.add(WORD, 2, "Reserved0", null);
StructureDataType s0 = new StructureDataType(NAME + "_s0", 0);
s0.add(BYTE, 1, "NumberOfProcessors", null);
s0.add(BYTE, 1, "ProductType", null);
u0.add(s0, 2, s0.getDisplayName(), null);
struct.add(u0, u0.getLength(), u0.getDisplayName(), null);
struct.add(DWORD, 4, "MajorVersion", null);
struct.add(DWORD, 4, "MinorVersion", null);
struct.add(DWORD, 4, "BuildNumber", null);
struct.add(DWORD, 4, "PlatformId", null);
struct.add(Pointer32DataType.dataType, 4, "CSDVersionRVA", null);
UnionDataType u1 = new UnionDataType(NAME + "_u1");
u1.add(DWORD, 4, "Reserved1", null);
StructureDataType s1 = new StructureDataType(NAME + "_s1", 0);
s1.add(WORD, 2, "SuiteMask", null);
s1.add(WORD, 2, "Reserved1", null);
u1.add(s1, s1.getLength(), s1.getDisplayName(), null);
struct.add(u1, u1.getLength(), u1.getDisplayName(), null);
UnionDataType u2 = new UnionDataType("CPU_INFORMATION");
StructureDataType s3 = new StructureDataType("X86CpuInfo", 0);
ArrayDataType a0 = new ArrayDataType(DWORD, 3, 4);
s3.add(a0, a0.getLength(), "VendorId", null);
s3.add(DWORD, 4, "VersionInformation", null);
s3.add(DWORD, 4, "FeatureInformation", null);
s3.add(DWORD, 4, "AMDExtendedCpuFeatures", null);
StructureDataType s4 = new StructureDataType("OtherCpuInfo", 0);
ArrayDataType a1 = new ArrayDataType(QWORD, 2, 8);
s4.add(a1, a1.getLength(), "ProcessorFeatures", null);
u2.add(s3, s3.getLength(), s3.getDisplayName(), null);
u2.add(s4, s4.getLength(), s4.getDisplayName(), null);
struct.add(u2, u2.getLength(), u2.getDisplayName(), null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public short getProcessorArchitecture() {
return processorArchitecture;
}
public void setProcessorArchitecture(short processorArchitecture) {
this.processorArchitecture = processorArchitecture;
}
public short getProcessorLevel() {
return processorLevel;
}
public void setProcessorLevel(short processorLevel) {
this.processorLevel = processorLevel;
}
public short getProcessorRevision() {
return processorRevision;
}
public void setProcessorRevision(short processorRevision) {
this.processorRevision = processorRevision;
}
public byte getNumberOfProcessors() {
return numberOfProcessors;
}
public void setNumberOfProcessors(byte numberOfProcessors) {
this.numberOfProcessors = numberOfProcessors;
}
public byte getProductType() {
return productType;
}
public void setProductType(byte productType) {
this.productType = productType;
}
public int getMajorVersion() {
return majorVersion;
}
public void setMajorVersion(int majorVersion) {
this.majorVersion = majorVersion;
}
public int getMinorVersion() {
return minorVersion;
}
public void setMinorVersion(int minorVersion) {
this.minorVersion = minorVersion;
}
public int getBuildNumber() {
return buildNumber;
}
public void setBuildNumber(int buildNumber) {
this.buildNumber = buildNumber;
}
public int getPlatformId() {
return platformId;
}
public void setPlatformId(int platformId) {
this.platformId = platformId;
}
public int getCSDVersionRVA() {
return rva;
}
public void setCSDVersionRVA(int rva) {
this.rva = rva;
}
public short getSuiteMask() {
return suiteMask;
}
public void setSuiteMask(short suiteMask) {
this.suiteMask = suiteMask;
}
public int getVendorId(int idx) {
return vendorId[idx];
}
public void setVendorId(int vendorId, int index) {
this.vendorId[index] = vendorId;
}
public int getVersionInformation() {
return versionInformation;
}
public void setVersionInformation(int versionInformation) {
this.versionInformation = versionInformation;
}
public int getFeatureInformation() {
return featureInformation;
}
public void setFeatureInformation(int featureInformation) {
this.featureInformation = featureInformation;
}
public int getAMDExtendedCpuFeatures() {
return AMDExtendedCpuFeatures;
}
public void setAMDExtendedCpuFeatures(int extendedCpuFeatures) {
AMDExtendedCpuFeatures = extendedCpuFeatures;
}
public int getCSDNameLength() {
return csdNameLength;
}
public String getCSDName() {
return csdName;
}
}

View File

@@ -0,0 +1,406 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class SystemMemoryInfoStream implements StructConverter {
public final static String NAME = "MINIDUMP_SYSTEM_MEMORY_INFO";
private short revision;
private short flags;
private String NAME0 = "MINIDUMP_SYSTEM_BASIC_INFORMATION";
private String NAME1 = "MINIDUMP_SYSTEM_FILECACHE_INFORMATION";
private String NAME2 = "MINIDUMP_SYSTEM_BASIC_PERFORMANCE_INFORMATION";
private String NAME3 = "MINIDUMP_SYSTEM_PERFORMANCE_INFORMATION";
// MINIDUMP_SYSTEM_BASIC_INFORMATION
private int basicTimerResolution;
private int basicPageSize;
private int basicNumberOfPhysicalPages;
private int basicLowestPhysicalPageNumber;
private int basicHighestPhysicalPageNumber;
private int basicAllocationGranularity;
private long basicMinimumUserModeAddress;
private long basicMaximumUserModeAddress;
private long basicActiveProcessorsAffinityMask;
private int basicNumberOfProcessors;
// MINIDUMP_SYSTEM_FILECACHE_INFORMATION
private long fcCurrentSize;
private long fcPeakSize;
private int fcPageFaultCount;
private long fcMinimumWorkingSet;
private long fcMaximumWorkingSet;
private long fcCurrentSizeIncludingTransitionInPages;
private long fcPeakSizeIncludingTransitionInPages;
private int fcTransitionRePurposeCount;
private int fcFlags;
// MINIDUMP_SYSTEM_BASIC_PERFORMANCE_INFORMATION
private long bpAvailablePages;
private long bpCommittedPages;
private long bpCommitLimit;
private long bpPeakCommitment;
// MINIDUMP_SYSTEM_PERFORMANCE_INFORMATION
private long perfIdleProcessTime;
private long perfIoReadTransferCount;
private long perfIoWriteTransferCount;
private long perfIoOtherTransferCount;
private int perfIoReadOperationCount;
private int perfIoWriteOperationCount;
private int perfIoOtherOperationCount;
private int perfAvailablePages;
private int perfCommittedPages;
private int perfCommitLimit;
private int perfPeakCommitment;
private int perfPageFaultCount;
private int perfCopyOnWriteCount;
private int perfTransitionCount;
private int perfCacheTransitionCount;
private int perfDemandZeroCount;
private int perfPageReadCount;
private int perfPageReadIoCount;
private int perfCacheReadCount;
private int perfCacheIoCount;
private int perfDirtyPagesWriteCount;
private int perfDirtyWriteIoCount;
private int perfMappedPagesWriteCount;
private int perfMappedWriteIoCount;
private int perfPagedPoolPages;
private int perfNonPagedPoolPages;
private int perfPagedPoolAllocs;
private int perfPagedPoolFrees;
private int perfNonPagedPoolAllocs;
private int perfNonPagedPoolFrees;
private int perfFreeSystemPtes;
private int perfResidentSystemCodePage;
private int perfTotalSystemDriverPages;
private int perfTotalSystemCodePages;
private int perfNonPagedPoolLookasideHits;
private int perfPagedPoolLookasideHits;
private int perfAvailablePagedPoolPages;
private int perfResidentSystemCachePage;
private int perfResidentPagedPoolPage;
private int perfResidentSystemDriverPage;
private int perfCcFastReadNoWait;
private int perfCcFastReadWait;
private int perfCcFastReadResourceMiss;
private int perfCcFastReadNotPossible;
private int perfCcFastMdlReadNoWait;
private int perfCcFastMdlReadWait;
private int perfCcFastMdlReadResourceMiss;
private int perfCcFastMdlReadNotPossible;
private int perfCcMapDataNoWait;
private int perfCcMapDataWait;
private int perfCcMapDataNoWaitMiss;
private int perfCcMapDataWaitMiss;
private int perfCcPinMappedDataCount;
private int perfCcPinReadNoWait;
private int perfCcPinReadWait;
private int perfCcPinReadNoWaitMiss;
private int perfCcPinReadWaitMiss;
private int perfCcCopyReadNoWait;
private int perfCcCopyReadWait;
private int perfCcCopyReadNoWaitMiss;
private int perfCcCopyReadWaitMiss;
private int perfCcMdlReadNoWait;
private int perfCcMdlReadWait;
private int perfCcMdlReadNoWaitMiss;
private int perfCcMdlReadWaitMiss;
private int perfCcReadAheadIos;
private int perfCcLazyWriteIos;
private int perfCcLazyWritePages;
private int perfCcDataFlushes;
private int perfCcDataPages;
private int ContextSwitches;
private int FirstLevelTbFills;
private int SecondLevelTbFills;
private int SystemCalls;
private long perfCcTotalDirtyPages;
private long perfCcDirtyPageThreshold;
private long perfResidentAvailablePages;
private long perfSharedCommittedPages;
private DumpFileReader reader;
private long index;
SystemMemoryInfoStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
revision = reader.readNextShort();
flags = reader.readNextShort();
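// The remainder of the stream is four fixed blocks: basic, file-cache, basic-performance, and performance information.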
// MINIDUMP_SYSTEM_BASIC_INFORMATION
basicTimerResolution = reader.readNextInt();
basicPageSize = reader.readNextInt();
basicNumberOfPhysicalPages = reader.readNextInt();
basicLowestPhysicalPageNumber = reader.readNextInt();
basicHighestPhysicalPageNumber = reader.readNextInt();
basicAllocationGranularity = reader.readNextInt();
basicMinimumUserModeAddress = reader.readNextLong();
basicMaximumUserModeAddress = reader.readNextLong();
basicActiveProcessorsAffinityMask = reader.readNextLong();
basicNumberOfProcessors = reader.readNextInt();
// MINIDUMP_SYSTEM_FILECACHE_INFORMATION
fcCurrentSize = reader.readNextLong();
fcPeakSize = reader.readNextLong();
fcPageFaultCount = reader.readNextInt();
fcMinimumWorkingSet = reader.readNextLong();
fcMaximumWorkingSet = reader.readNextLong();
fcCurrentSizeIncludingTransitionInPages = reader.readNextLong();
fcPeakSizeIncludingTransitionInPages = reader.readNextLong();
fcTransitionRePurposeCount = reader.readNextInt();
fcFlags = reader.readNextInt();
// MINIDUMP_SYSTEM_BASIC_PERFORMANCE_INFORMATION
bpAvailablePages = reader.readNextLong();
bpCommittedPages = reader.readNextLong();
bpCommitLimit = reader.readNextLong();
bpPeakCommitment = reader.readNextLong();
// MINIDUMP_SYSTEM_PERFORMANCE_INFORMATION
perfIdleProcessTime = reader.readNextLong();
perfIoReadTransferCount = reader.readNextLong();
perfIoWriteTransferCount = reader.readNextLong();
perfIoOtherTransferCount = reader.readNextLong();
perfIoReadOperationCount = reader.readNextInt();
perfIoWriteOperationCount = reader.readNextInt();
perfIoOtherOperationCount = reader.readNextInt();
perfAvailablePages = reader.readNextInt();
perfCommittedPages = reader.readNextInt();
perfCommitLimit = reader.readNextInt();
perfPeakCommitment = reader.readNextInt();
perfPageFaultCount = reader.readNextInt();
perfCopyOnWriteCount = reader.readNextInt();
perfTransitionCount = reader.readNextInt();
perfCacheTransitionCount = reader.readNextInt();
perfDemandZeroCount = reader.readNextInt();
perfPageReadCount = reader.readNextInt();
perfPageReadIoCount = reader.readNextInt();
perfCacheReadCount = reader.readNextInt();
perfCacheIoCount = reader.readNextInt();
perfDirtyPagesWriteCount = reader.readNextInt();
perfDirtyWriteIoCount = reader.readNextInt();
perfMappedPagesWriteCount = reader.readNextInt();
perfMappedWriteIoCount = reader.readNextInt();
perfPagedPoolPages = reader.readNextInt();
perfNonPagedPoolPages = reader.readNextInt();
perfPagedPoolAllocs = reader.readNextInt();
perfPagedPoolFrees = reader.readNextInt();
perfNonPagedPoolAllocs = reader.readNextInt();
perfNonPagedPoolFrees = reader.readNextInt();
perfFreeSystemPtes = reader.readNextInt();
perfResidentSystemCodePage = reader.readNextInt();
perfTotalSystemDriverPages = reader.readNextInt();
perfTotalSystemCodePages = reader.readNextInt();
perfNonPagedPoolLookasideHits = reader.readNextInt();
perfPagedPoolLookasideHits = reader.readNextInt();
perfAvailablePagedPoolPages = reader.readNextInt();
perfResidentSystemCachePage = reader.readNextInt();
perfResidentPagedPoolPage = reader.readNextInt();
perfResidentSystemDriverPage = reader.readNextInt();
perfCcFastReadNoWait = reader.readNextInt();
perfCcFastReadWait = reader.readNextInt();
perfCcFastReadResourceMiss = reader.readNextInt();
perfCcFastReadNotPossible = reader.readNextInt();
perfCcFastMdlReadNoWait = reader.readNextInt();
perfCcFastMdlReadWait = reader.readNextInt();
perfCcFastMdlReadResourceMiss = reader.readNextInt();
perfCcFastMdlReadNotPossible = reader.readNextInt();
perfCcMapDataNoWait = reader.readNextInt();
perfCcMapDataWait = reader.readNextInt();
perfCcMapDataNoWaitMiss = reader.readNextInt();
perfCcMapDataWaitMiss = reader.readNextInt();
perfCcPinMappedDataCount = reader.readNextInt();
perfCcPinReadNoWait = reader.readNextInt();
perfCcPinReadWait = reader.readNextInt();
perfCcPinReadNoWaitMiss = reader.readNextInt();
perfCcPinReadWaitMiss = reader.readNextInt();
perfCcCopyReadNoWait = reader.readNextInt();
perfCcCopyReadWait = reader.readNextInt();
perfCcCopyReadNoWaitMiss = reader.readNextInt();
perfCcCopyReadWaitMiss = reader.readNextInt();
perfCcMdlReadNoWait = reader.readNextInt();
perfCcMdlReadWait = reader.readNextInt();
perfCcMdlReadNoWaitMiss = reader.readNextInt();
perfCcMdlReadWaitMiss = reader.readNextInt();
perfCcReadAheadIos = reader.readNextInt();
perfCcLazyWriteIos = reader.readNextInt();
perfCcLazyWritePages = reader.readNextInt();
perfCcDataFlushes = reader.readNextInt();
perfCcDataPages = reader.readNextInt();
ContextSwitches = reader.readNextInt();
FirstLevelTbFills = reader.readNextInt();
SecondLevelTbFills = reader.readNextInt();
SystemCalls = reader.readNextInt();
perfCcTotalDirtyPages = reader.readNextLong();
perfCcDirtyPageThreshold = reader.readNextLong();
perfResidentAvailablePages = reader.readNextLong();
perfSharedCommittedPages = reader.readNextLong();
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(WORD, 2, "Revision", null);
struct.add(WORD, 2, "Flags", null);
StructureDataType s0 = new StructureDataType(NAME0, 0);
StructureDataType s1 = new StructureDataType(NAME1, 0);
StructureDataType s2 = new StructureDataType(NAME2, 0);
StructureDataType s3 = new StructureDataType(NAME3, 0);
s0.add(DWORD, 4, "TimerResolution", null);
s0.add(DWORD, 4, "PageSize", null);
s0.add(DWORD, 4, "NumberOfPhysicalPages", null);
s0.add(DWORD, 4, "LowestPhysicalPageNumber", null);
s0.add(DWORD, 4, "HighestPhysicalPageNumber", null);
s0.add(DWORD, 4, "AllocationGranularity", null);
s0.add(QWORD, 8, "MinimumUserModeAddress", null);
s0.add(QWORD, 8, "MaximumUserModeAddress", null);
s0.add(QWORD, 8, "ActiveProcessorsAffinityMask", null);
s0.add(DWORD, 4, "NumberOfProcessors", null);
struct.add(s0, s0.getLength(), NAME0, null);
s1.add(QWORD, 8, "CurrentSize", null);
s1.add(QWORD, 8, "PeakSize", null);
s1.add(DWORD, 4, "PageFaultCount", null);
s1.add(QWORD, 8, "MinimumWorkingSet", null);
s1.add(QWORD, 8, "MaximumWorkingSet", null);
s1.add(QWORD, 8, "CurrentSizeIncludingTransitionInPages", null);
s1.add(QWORD, 8, "PeakSizeIncludingTransitionInPages", null);
s1.add(DWORD, 4, "TransitionRePurposeCount", null);
s1.add(DWORD, 4, "Flags", null);
struct.add(s1, s1.getLength(), NAME1, null);
s2.add(QWORD, 8, "AvailablePages", null);
s2.add(QWORD, 8, "CommittedPages", null);
s2.add(QWORD, 8, "CommitLimit", null);
s2.add(QWORD, 8, "PeakCommitment", null);
struct.add(s2, s2.getLength(), NAME2, null);
s3.add(QWORD, 8, "IdleProcessTime", null);
s3.add(QWORD, 8, "IoReadTransferCount", null);
s3.add(QWORD, 8, "IoWriteTransferCount", null);
s3.add(QWORD, 8, "IoOtherTransferCount", null);
s3.add(DWORD, 4, "IoReadOperationCount", null);
s3.add(DWORD, 4, "IoWriteOperationCount", null);
s3.add(DWORD, 4, "IoOtherOperationCount", null);
s3.add(DWORD, 4, "AvailablePages", null);
s3.add(DWORD, 4, "CommittedPages", null);
s3.add(DWORD, 4, "CommitLimit", null);
s3.add(DWORD, 4, "PeakCommitment", null);
s3.add(DWORD, 4, "PageFaultCount", null);
s3.add(DWORD, 4, "CopyOnWriteCount", null);
s3.add(DWORD, 4, "TransitionCount", null);
s3.add(DWORD, 4, "CacheTransitionCount", null);
s3.add(DWORD, 4, "DemandZeroCount", null);
s3.add(DWORD, 4, "PageReadCount", null);
s3.add(DWORD, 4, "PageReadIoCount", null);
s3.add(DWORD, 4, "CacheReadCount", null);
s3.add(DWORD, 4, "CacheIoCount", null);
s3.add(DWORD, 4, "DirtyPagesWriteCount", null);
s3.add(DWORD, 4, "DirtyWriteIoCount", null);
s3.add(DWORD, 4, "MappedPagesWriteCount", null);
s3.add(DWORD, 4, "MappedWriteIoCount", null);
s3.add(DWORD, 4, "PagedPoolPages", null);
s3.add(DWORD, 4, "NonPagedPoolPages", null);
s3.add(DWORD, 4, "PagedPoolAllocs", null);
s3.add(DWORD, 4, "PagedPoolFrees", null);
s3.add(DWORD, 4, "NonPagedPoolAllocs", null);
s3.add(DWORD, 4, "NonPagedPoolFrees", null);
s3.add(DWORD, 4, "FreeSystemPtes", null);
s3.add(DWORD, 4, "ResidentSystemCodePage", null);
s3.add(DWORD, 4, "TotalSystemDriverPages", null);
s3.add(DWORD, 4, "TotalSystemCodePages", null);
s3.add(DWORD, 4, "NonPagedPoolLookasideHits", null);
s3.add(DWORD, 4, "PagedPoolLookasideHits", null);
s3.add(DWORD, 4, "AvailablePagedPoolPages", null);
s3.add(DWORD, 4, "ResidentSystemCachePage", null);
s3.add(DWORD, 4, "ResidentPagedPoolPage", null);
s3.add(DWORD, 4, "ResidentSystemDriverPage", null);
s3.add(DWORD, 4, "CcFastReadNoWait", null);
s3.add(DWORD, 4, "CcFastReadWait", null);
s3.add(DWORD, 4, "CcFastReadResourceMiss", null);
s3.add(DWORD, 4, "CcFastReadNotPossible", null);
s3.add(DWORD, 4, "CcFastMdlReadNoWait", null);
s3.add(DWORD, 4, "CcFastMdlReadWait", null);
s3.add(DWORD, 4, "CcFastMdlReadResourceMiss", null);
s3.add(DWORD, 4, "CcFastMdlReadNotPossible", null);
s3.add(DWORD, 4, "CcMapDataNoWait", null);
s3.add(DWORD, 4, "CcMapDataWait", null);
s3.add(DWORD, 4, "CcMapDataNoWaitMiss", null);
s3.add(DWORD, 4, "CcMapDataWaitMiss", null);
s3.add(DWORD, 4, "CcPinMappedDataCount", null);
s3.add(DWORD, 4, "CcPinReadNoWait", null);
s3.add(DWORD, 4, "CcPinReadWait", null);
s3.add(DWORD, 4, "CcPinReadNoWaitMiss", null);
s3.add(DWORD, 4, "CcPinReadWaitMiss", null);
s3.add(DWORD, 4, "CcCopyReadNoWait", null);
s3.add(DWORD, 4, "CcCopyReadWait", null);
s3.add(DWORD, 4, "CcCopyReadNoWaitMiss", null);
s3.add(DWORD, 4, "CcCopyReadWaitMiss", null);
s3.add(DWORD, 4, "CcMdlReadNoWait", null);
s3.add(DWORD, 4, "CcMdlReadWait", null);
s3.add(DWORD, 4, "CcMdlReadNoWaitMiss", null);
s3.add(DWORD, 4, "CcMdlReadWaitMiss", null);
s3.add(DWORD, 4, "CcReadAheadIos", null);
s3.add(DWORD, 4, "CcLazyWriteIos", null);
s3.add(DWORD, 4, "CcLazyWritePages", null);
s3.add(DWORD, 4, "CcDataFlushes", null);
s3.add(DWORD, 4, "CcDataPages", null);
s3.add(DWORD, 4, "ContextSwitches", null);
s3.add(DWORD, 4, "FirstLevelTbFills", null);
s3.add(DWORD, 4, "SecondLevelTbFills", null);
s3.add(DWORD, 4, "SystemCalls", null);
s3.add(QWORD, 8, "CcTotalDirtyPages", null);
s3.add(QWORD, 8, "CcDirtyPageThreshold", null);
s3.add(QWORD, 8, "ResidentAvailablePages", null);
s3.add(QWORD, 8, "SharedCommittedPages", null);
struct.add(s3, s3.getLength(), NAME3, null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
}

View File

@@ -0,0 +1,184 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class Thread implements StructConverter {
public final static String NAME = "MINIDUMP_THREAD";
private int threadId;
private int suspendCount;
private int priorityClass;
private int platformId;
private int priority;
private long teb;
private long stackStartOfMemoryRange;
private int stackDataSize;
private int stackRVA;
private int contextDataSize;
private int contextRVA;
private DumpFileReader reader;
private long index;
public Thread(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setThreadId(reader.readNextInt());
setSuspendCount(reader.readNextInt());
setPriorityClass(reader.readNextInt());
setPriority(reader.readNextInt());
setTeb(reader.readNextLong());
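// Stack is a MINIDUMP_MEMORY_DESCRIPTOR; ThreadContext is a MINIDUMP_LOCATION_DESCRIPTOR.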
setStackStartOfMemoryRange(reader.readNextLong());
setStackDataSize(reader.readNextInt());
setStackRVA(reader.readNextInt());
setContextDataSize(reader.readNextInt());
setContextRVA(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "ThreadId", null);
struct.add(DWORD, 4, "SuspendCount", null);
struct.add(DWORD, 4, "PriorityClass", null);
struct.add(DWORD, 4, "Priority", null);
struct.add(QWORD, 8, "Teb", null);
StructureDataType s0 = new StructureDataType("Stack", 0);
s0.add(QWORD, 8, "StartOfMemoryRange", null);
s0.add(DWORD, 4, "DataSize", null);
s0.add(Pointer32DataType.dataType, 4, "RVA", null);
StructureDataType s1 = new StructureDataType("Context", 0);
s1.add(DWORD, 4, "DataSize", null);
s1.add(Pointer32DataType.dataType, 4, "RVA", null);
struct.add(s0, s0.getLength(), s0.getDisplayName(), null);
struct.add(s1, s1.getLength(), s1.getDisplayName(), null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getThreadId() {
return threadId;
}
public void setThreadId(int threadId) {
this.threadId = threadId;
}
public int getSuspendCount() {
return suspendCount;
}
public void setSuspendCount(int suspendCount) {
this.suspendCount = suspendCount;
}
public int getPriorityClass() {
return priorityClass;
}
public void setPriorityClass(int priorityClass) {
this.priorityClass = priorityClass;
}
public int getPlatformId() {
return platformId;
}
public void setPlatformId(int platformId) {
this.platformId = platformId;
}
public int getPriority() {
return priority;
}
public void setPriority(int priority) {
this.priority = priority;
}
public long getTeb() {
return teb;
}
public void setTeb(long teb) {
this.teb = teb;
}
public long getStackStartOfMemoryRange() {
return stackStartOfMemoryRange;
}
public void setStackStartOfMemoryRange(long stackStartOfMemoryRange) {
this.stackStartOfMemoryRange = stackStartOfMemoryRange;
}
public int getStackDataSize() {
return stackDataSize;
}
public void setStackDataSize(int stackDataSize) {
this.stackDataSize = stackDataSize;
}
public int getStackRVA() {
return stackRVA;
}
public void setStackRVA(int stackRVA) {
this.stackRVA = stackRVA;
}
public int getContextDataSize() {
return contextDataSize;
}
public void setContextDataSize(int contextDataSize) {
this.contextDataSize = contextDataSize;
}
public int getContextRVA() {
return contextRVA;
}
public void setContextRVA(int contextRVA) {
this.contextRVA = contextRVA;
}
}

View File

@@ -0,0 +1,221 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ThreadEx implements StructConverter {
public final static String NAME = "MINIDUMP_THREAD_EX";
private int threadId;
private int suspendCount;
private int priorityClass;
private int platformId;
private int priority;
private long teb;
private long stackStartOfMemoryRange;
private int stackDataSize;
private int stackRVA;
private int contextDataSize;
private int contextRVA;
private long backingStoreStartOfMemoryRange;
private int backingStoreDataSize;
private int backingStoreRVA;
private DumpFileReader reader;
private long index;
ThreadEx(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
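// Layout note: MINIDUMP_THREAD_EX is MINIDUMP_THREAD plus a trailing BackingStore
// memory descriptor (StartOfMemoryRange/DataSize/RVA), historically used for the
// Itanium register backing store.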
private void parse() throws IOException {
reader.setPointerIndex(index);
setThreadId(reader.readNextInt());
setSuspendCount(reader.readNextInt());
setPriorityClass(reader.readNextInt());
setPriority(reader.readNextInt());
setTeb(reader.readNextLong());
setStackStartOfMemoryRange(reader.readNextLong());
setStackDataSize(reader.readNextInt());
setStackRVA(reader.readNextInt());
setContextDataSize(reader.readNextInt());
setContextRVA(reader.readNextInt());
setBackingStoreStartOfMemoryRange(reader.readNextLong());
setBackingStoreDataSize(reader.readNextInt());
setBackingStoreRVA(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "ThreadId", null);
struct.add(DWORD, 4, "SuspendCount", null);
struct.add(DWORD, 4, "PriorityClass", null);
struct.add(DWORD, 4, "Priority", null);
struct.add(QWORD, 8, "Teb", null);
StructureDataType s0 = new StructureDataType("Stack", 0);
s0.add(DWORD, 4, "StartOfMemoryRange", null);
s0.add(DWORD, 4, "DataSize", null);
s0.add(Pointer32DataType.dataType, 4, "RVA", null);
StructureDataType s1 = new StructureDataType("Context", 0);
s1.add(DWORD, 4, "DataSize", null);
s1.add(Pointer32DataType.dataType, 4, "RVA", null);
StructureDataType s2 = new StructureDataType("BackingStore", 0);
s2.add(DWORD, 4, "StartOfMemoryRange", null);
s2.add(DWORD, 4, "DataSize", null);
s2.add(Pointer32DataType.dataType, 4, "RVA", null);
struct.add(s0, s0.getLength(), s0.getDisplayName(), null);
struct.add(s1, s1.getLength(), s1.getDisplayName(), null);
struct.add(s2, s2.getLength(), s2.getDisplayName(), null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getThreadId() {
return threadId;
}
public void setThreadId(int threadId) {
this.threadId = threadId;
}
public int getSuspendCount() {
return suspendCount;
}
public void setSuspendCount(int suspendCount) {
this.suspendCount = suspendCount;
}
public int getPriorityClass() {
return priorityClass;
}
public void setPriorityClass(int priorityClass) {
this.priorityClass = priorityClass;
}
public int getPlatformId() {
return platformId;
}
public void setPlatformId(int platformId) {
this.platformId = platformId;
}
public int getPriority() {
return priority;
}
public void setPriority(int priority) {
this.priority = priority;
}
public long getTeb() {
return teb;
}
public void setTeb(long teb) {
this.teb = teb;
}
public long getStackStartOfMemoryRange() {
return stackStartOfMemoryRange;
}
public void setStackStartOfMemoryRange(long stackStartOfMemoryRange) {
this.stackStartOfMemoryRange = stackStartOfMemoryRange;
}
public int getStackDataSize() {
return stackDataSize;
}
public void setStackDataSize(int stackDataSize) {
this.stackDataSize = stackDataSize;
}
public int getStackRVA() {
return stackRVA;
}
public void setStackRVA(int stackRVA) {
this.stackRVA = stackRVA;
}
public int getContextDataSize() {
return contextDataSize;
}
public void setContextDataSize(int contextDataSize) {
this.contextDataSize = contextDataSize;
}
public int getContextRVA() {
return contextRVA;
}
public void setContextRVA(int contextRVA) {
this.contextRVA = contextRVA;
}
public void setBackingStoreStartOfMemoryRange(
long backingStoreStartOfMemoryRange) {
this.backingStoreStartOfMemoryRange = backingStoreStartOfMemoryRange;
}
public long getBackingStoreStartOfMemoryRange() {
return backingStoreStartOfMemoryRange;
}
public void setBackingStoreDataSize(int backingStoreDataSize) {
this.backingStoreDataSize = backingStoreDataSize;
}
public int getBackingStoreDataSize() {
return backingStoreDataSize;
}
public void setBackingStoreRVA(int backingStoreRVA) {
this.backingStoreRVA = backingStoreRVA;
}
public int getBackingStoreRVA() {
return backingStoreRVA;
}
}

View File

@@ -0,0 +1,83 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ThreadExListStream implements StructConverter {
public final static String NAME = "MINIDUMP_THREAD_EX_LIST";
private int numberOfThreads;
private ThreadEx[] threads;
private DumpFileReader reader;
private long index;
ThreadExListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setNumberOfThreads(reader.readNextInt());
threads = new ThreadEx[numberOfThreads];
for (int i = 0; i < numberOfThreads; i++) {
setThread(new ThreadEx(reader, reader.getPointerIndex()), i);
}
}
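// NB: toDataType() below derives the array element type from the first parsed entry,
// so it implicitly assumes NumberOfThreads > 0; an empty stream would throw.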
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "NumberOfThreads", null);
DataType t = threads[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfThreads, t.getLength());
struct.add(a, a.getLength(), "Threads", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getNumberOfThreads() {
return numberOfThreads;
}
public void setNumberOfThreads(int numberOfThreads) {
this.numberOfThreads = numberOfThreads;
}
public ThreadEx getThread(int idx) {
return threads[idx];
}
public void setThread(ThreadEx thread, int index) {
this.threads[index] = thread;
}
}

View File

@@ -0,0 +1,167 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ThreadInfo implements StructConverter {
public final static String NAME = "MINIDUMP_THREAD_INFO";
private int threadId;
private int dumpFlags;
private int dumpError;
private int exitStatus;
private long createTime;
private long exitTime;
private long kernelTime;
private long userTime;
private long startAddress;
private long affinity;
private DumpFileReader reader;
private long index;
ThreadInfo(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setThreadId(reader.readNextInt());
setDumpFlags(reader.readNextInt());
setDumpError(reader.readNextInt());
setExitStatus(reader.readNextInt());
setCreateTime(reader.readNextLong());
setExitTime(reader.readNextLong());
setKernelTime(reader.readNextLong());
setUserTime(reader.readNextLong());
setStartAddress(reader.readNextLong());
setAffinity(reader.readNextLong());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "ThreadId", null);
struct.add(DWORD, 4, "DumpFlags", null);
struct.add(DWORD, 4, "DumpError", null);
struct.add(DWORD, 4, "ExitStatus", null);
struct.add(QWORD, 8, "CreateTime", null);
struct.add(QWORD, 8, "ExitTime", null);
struct.add(QWORD, 8, "KernelTime", null);
struct.add(QWORD, 8, "UserTime", null);
struct.add(QWORD, 8, "StartAddress", null);
struct.add(QWORD, 8, "Affinity", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getThreadId() {
return threadId;
}
public void setThreadId(int threadId) {
this.threadId = threadId;
}
public int getDumpFlags() {
return dumpFlags;
}
public void setDumpFlags(int dumpFlags) {
this.dumpFlags = dumpFlags;
}
public int getDumpError() {
return dumpError;
}
public void setDumpError(int dumpError) {
this.dumpError = dumpError;
}
public int getExitStatus() {
return exitStatus;
}
public void setExitStatus(int exitStatus) {
this.exitStatus = exitStatus;
}
public long getCreateTime() {
return createTime;
}
public void setCreateTime(long createTime) {
this.createTime = createTime;
}
public long getExitTime() {
return exitTime;
}
public void setExitTime(long exitTime) {
this.exitTime = exitTime;
}
public long getKernelTime() {
return kernelTime;
}
public void setKernelTime(long kernelTime) {
this.kernelTime = kernelTime;
}
public long getUserTime() {
return userTime;
}
public void setUserTime(long userTime) {
this.userTime = userTime;
}
public long getStartAddress() {
return startAddress;
}
public void setStartAddress(long startAddress) {
this.startAddress = startAddress;
}
public long getAffinity() {
return affinity;
}
public void setAffinity(long affinity) {
this.affinity = affinity;
}
}

View File

@@ -0,0 +1,106 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ThreadInfoListStream implements StructConverter {
public final static String NAME = "MINIDUMP_THREAD_INFO_LIST";
private int sizeOfHeader;
private int sizeOfEntry;
private int numberOfEntries;
private ThreadInfo[] entries;
private DumpFileReader reader;
private long index;
ThreadInfoListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSizeOfHeader(reader.readNextInt());
setSizeOfEntry(reader.readNextInt());
setNumberOfEntries(reader.readNextInt());
entries = new ThreadInfo[numberOfEntries];
for (int i = 0; i < numberOfEntries; i++) {
setThreadInfo(new ThreadInfo(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "SizeOfHeader", null);
struct.add(DWORD, 4, "SizeOfEntry", null);
struct.add(DWORD, 4, "NumberOfThreads", null);
DataType t = entries[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfEntries, t.getLength());
struct.add(a, a.getLength(), "Threads", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public void setSizeOfHeader(int sizeOfHeader) {
this.sizeOfHeader = sizeOfHeader;
}
public int getSizeOfHeader() {
return sizeOfHeader;
}
public void setSizeOfEntry(int sizeOfEntry) {
this.sizeOfEntry = sizeOfEntry;
}
public int getSizeOfEntry() {
return sizeOfEntry;
}
public int getNumberOfEntries() {
return numberOfEntries;
}
public void setNumberOfEntries(int numberOfEntries) {
this.numberOfEntries = numberOfEntries;
}
public ThreadInfo getThreadInfo(int idx) {
return entries[idx];
}
public void setThreadInfo(ThreadInfo thread, int index) {
this.entries[index] = thread;
}
}

View File

@@ -0,0 +1,83 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ThreadListStream implements StructConverter {
public final static String NAME = "MINIDUMP_THREAD_LIST";
private int numberOfThreads;
private Thread[] threads;
private DumpFileReader reader;
private long index;
ThreadListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setNumberOfThreads(reader.readNextInt());
threads = new Thread[numberOfThreads];
for (int i = 0; i < numberOfThreads; i++) {
setThread(new Thread(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "NumberOfThreads", null);
DataType t = threads[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfThreads, t.getLength());
struct.add(a, a.getLength(), "Threads", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getNumberOfThreads() {
return numberOfThreads;
}
public void setNumberOfThreads(int numberOfThreads) {
this.numberOfThreads = numberOfThreads;
}
public Thread getThread(int idx) {
return threads[idx];
}
public void setThread(Thread thread, int index) {
this.threads[index] = thread;
}
}

View File

@@ -0,0 +1,90 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.DataType;
import ghidra.program.model.data.StructureDataType;
import ghidra.util.exception.DuplicateNameException;
public class Token implements StructConverter {
public final static String NAME = "MINIDUMP_TOKEN";
private int tokenSize;
private int tokenId;
private long tokenHandle;
private DumpFileReader reader;
private long index;
Token(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setTokenSize(reader.readNextInt());
setTokenId(reader.readNextInt());
setTokenHandle(reader.readNextLong()); // TokenHandle is 64-bit (QWORD in toDataType)
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "Size", null);
struct.add(DWORD, 4, "Id", null);
struct.add(QWORD, 8, "Handle", null);
return struct;
}
public int getTokenSize() {
return tokenSize;
}
public void setTokenSize(int tokenSize) {
this.tokenSize = tokenSize;
}
public int getTokenId() {
return tokenId;
}
public void setTokenId(int tokenId) {
this.tokenId = tokenId;
}
public long getTokenHandle() {
return tokenHandle;
}
public void setTokenHandle(long tokenHandle) {
this.tokenHandle = tokenHandle;
}
}

View File

@@ -0,0 +1,116 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class TokenListStream implements StructConverter {
public final static String NAME = "MINIDUMP_TOKEN_LIST";
private int tokenListSize;
private int tokenListEntries;
private int listHeaderSize;
private int elementHeaderSize;
private Token[] tokens;
private DumpFileReader reader;
private long index;
TokenListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setTokenListSize(reader.readNextInt());
setTokenListEntries(reader.readNextInt());
setListHeaderSize(reader.readNextInt());
setElementHeaderSize(reader.readNextInt());
tokens = new Token[getTokenListEntries()];
for (int i = 0; i < getTokenListEntries(); i++) {
setToken(new Token(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "TokenListSize", null);
struct.add(DWORD, 4, "TokenListEntries", null);
struct.add(DWORD, 4, "ListHeaderSize", null);
struct.add(DWORD, 4, "ElementHeaderSize", null);
DataType t = tokens[0].toDataType();
ArrayDataType a = new ArrayDataType(t, getTokenListEntries(), t.getLength());
struct.add(a, a.getLength(), "Tokens", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public Token getToken(int idx) {
return tokens[idx];
}
public void setToken(Token token, int index) {
this.tokens[index] = token;
}
public int getTokenListSize() {
return tokenListSize;
}
public void setTokenListSize(int tokenListSize) {
this.tokenListSize = tokenListSize;
}
public int getTokenListEntries() {
return tokenListEntries;
}
public void setTokenListEntries(int tokenListEntries) {
this.tokenListEntries = tokenListEntries;
}
public int getListHeaderSize() {
return listHeaderSize;
}
public void setListHeaderSize(int listHeaderSize) {
this.listHeaderSize = listHeaderSize;
}
public int getElementHeaderSize() {
return elementHeaderSize;
}
public void setElementHeaderSize(int elementHeaderSize) {
this.elementHeaderSize = elementHeaderSize;
}
}

View File

@@ -0,0 +1,113 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class UnloadedModule implements StructConverter {
public final static String NAME = "MINIDUMP_UNLOADED_MODULE";
private long baseOfImage;
private int sizeOfImage;
private int checkSum;
private int timeDateStamp;
private int moduleNameRVA;
private DumpFileReader reader;
private long index;
UnloadedModule(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setBaseOfImage(reader.readNextLong());
setSizeOfImage(reader.readNextInt());
setCheckSum(reader.readNextInt());
setTimeDateStamp(reader.readNextInt());
setModuleNameRVA(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "BaseOfImage", null);
struct.add(DWORD, 4, "SizeOfImage", null);
struct.add(DWORD, 4, "CheckSum", null);
struct.add(DWORD, 4, "TimeDateStamp", null);
struct.add(Pointer32DataType.dataType, 4, "ModuleNameRVA", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public long getBaseOfImage() {
return baseOfImage;
}
public void setBaseOfImage(long baseOfImage) {
this.baseOfImage = baseOfImage;
}
public int getSizeOfImage() {
return sizeOfImage;
}
public void setSizeOfImage(int sizeOfImage) {
this.sizeOfImage = sizeOfImage;
}
public int getCheckSum() {
return checkSum;
}
public void setCheckSum(int checkSum) {
this.checkSum = checkSum;
}
public int getTimeDateStamp() {
return timeDateStamp;
}
public void setTimeDateStamp(int timeDateStamp) {
this.timeDateStamp = timeDateStamp;
}
public int getModuleNameRVA() {
return moduleNameRVA;
}
public void setModuleNameRVA(int moduleNameRVA) {
this.moduleNameRVA = moduleNameRVA;
}
}

View File

@@ -0,0 +1,105 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.mdmp;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class UnloadedModuleListStream implements StructConverter {
public final static String NAME = "MINIDUMP_UNLOADED_MODULE_LIST";
private int sizeOfHeader;
private int sizeOfEntry;
private int numberOfEntries;
private UnloadedModule[] entries;
private DumpFileReader reader;
private long index;
UnloadedModuleListStream(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSizeOfHeader(reader.readNextInt());
setSizeOfEntry(reader.readNextInt());
setNumberOfEntries(reader.readNextInt());
entries = new UnloadedModule[numberOfEntries];
for (int i = 0; i < numberOfEntries; i++) {
setEntry(new UnloadedModule(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "SizeOfHeader", null);
struct.add(DWORD, 4, "SizeOfDescriptor", null);
struct.add(DWORD, 4, "NumberOfHandles", null);
DataType t = entries[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfEntries, t.getLength());
struct.add(a, a.getLength(), "Handles", null);
struct.setCategoryPath(new CategoryPath("/MDMP"));
return struct;
}
public int getNumberOfEntries() {
return numberOfEntries;
}
public void setNumberOfEntries(int numberOfEntries) {
this.numberOfEntries = numberOfEntries;
}
public UnloadedModule getEntry(int idx) {
return entries[idx];
}
public void setEntry(UnloadedModule entry, int index) {
this.entries[index] = entry;
}
public void setSizeOfHeader(int sizeOfHeader) {
this.sizeOfHeader = sizeOfHeader;
}
public int getSizeOfHeader() {
return sizeOfHeader;
}
public void setSizeOfEntry(int sizeOfEntry) {
this.sizeOfEntry = sizeOfEntry;
}
public int getSizeOfEntry() {
return sizeOfEntry;
}
}

View File

@@ -0,0 +1,112 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class ExceptionRecord implements StructConverter {
public final static String NAME = "PAGEDUMP_EXCEPTION_RECORD";
private int exceptionCode;
private int exceptionFlags;
private long exceptionRecord;
private long exceptionAddress;
private int numberOfParameters;
private DumpFileReader reader;
private long index;
ExceptionRecord(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setExceptionCode(reader.readNextInt());
setExceptionFlags(reader.readNextInt());
setExceptionRecord(reader.readNextPointer());
setExceptionAddress(reader.readNextPointer());
setNumberOfParameters(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
int psz = reader.getPointerSize();
struct.add(DWORD, 4, "ExceptionCode", null);
struct.add(DWORD, 4, "ExceptionFlags", null);
// ExceptionRecord and ExceptionAddress are parsed as pointers above, so size them accordingly
struct.add(POINTER, psz, "ExceptionRecord", null);
struct.add(POINTER, psz, "ExceptionAddress", null);
struct.add(DWORD, 4, "NumberOfParameters", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public int getExceptionCode() {
return exceptionCode;
}
public void setExceptionCode(int exceptionCode) {
this.exceptionCode = exceptionCode;
}
public int getExceptionFlags() {
return exceptionFlags;
}
public void setExceptionFlags(int exceptionFlags) {
this.exceptionFlags = exceptionFlags;
}
public long getExceptionRecord() {
return exceptionRecord;
}
public void setExceptionRecord(long exceptionRecord) {
this.exceptionRecord = exceptionRecord;
}
public long getExceptionAddress() {
return exceptionAddress;
}
public void setExceptionAddress(long exceptionAddress) {
this.exceptionAddress = exceptionAddress;
}
public int getNumberOfParameters() {
return numberOfParameters;
}
public void setNumberOfParameters(int numberOfParameters) {
this.numberOfParameters = numberOfParameters;
}
}

View File

@@ -0,0 +1,165 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import java.util.*;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
public class FullDumpHeader implements StructConverter {
public final static String NAME = "PAGEDUMP_FULL";
public static final int SIGNATURE = 0x45474150; // "PAGE"
private int signature;
private int validDump;
private long dumpOptions;
private long headerSize;
private long bitmapSize;
private long pages;
private byte[] buffer;
private Map<Integer, Integer> pfnToRva = new HashMap<>();
private DumpFileReader reader;
private long index;
FullDumpHeader(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setSignature(reader.readNextInt());
setValidDump(reader.readNextInt());
setDumpOptions(reader.readNextLong());
reader.readNextLong();
reader.readNextLong();
setHeaderSize(reader.readNextLong());
setBitmapSize(reader.readNextLong());
setPages(reader.readNextLong());
buffer = new byte[(int) (pages + 7) / 8];
int pfn = 0;
int rvan = 0;
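// The bitmap has one bit per physical page frame; a set bit means that page is present
// in the dump. Saved pages follow in bitmap order, so the n-th set bit corresponds to
// the n-th page of data after the header -- pfnToRva records that ordinal for each PFN.
// Adding 256 simply normalizes the signed byte so its low eight bits can be tested.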
for (int i = 0; i < buffer.length; i++) {
buffer[i] = reader.readNextByte();
short temp = (short) (buffer[i] + 256);
for (int j = 0; j < 8; j++) {
int bitval = (temp >> j) % 2;
if (bitval != 0) {
pfn = i * 8 + j;
pfnToRva.put(pfn, rvan++);
}
}
}
}
@Override
public DataType toDataType() {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(STRING, 4, "Signature", null);
struct.add(STRING, 4, "ValidDump", null);
struct.add(QWORD, 8, "DumpOptions", null);
struct.add(QWORD, 8, "", null);
struct.add(QWORD, 8, "", null);
struct.add(QWORD, 8, "HeaderSize", null);
struct.add(QWORD, 8, "BitmapSize", null);
struct.add(QWORD, 8, "Pages", null);
if (bitmapSize > 0) {
ArrayDataType a = new ArrayDataType(BYTE, (int) ((pages + 7) / 8), 1); // match the parsed bitmap length
struct.add(a, a.getLength(), "Buffer", null);
}
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public int getSignature() {
return signature;
}
public void setSignature(int signature) {
this.signature = signature;
}
public int getValidDump() {
return validDump;
}
public void setValidDump(int validDump) {
this.validDump = validDump;
}
public long getDumpOptions() {
return dumpOptions;
}
public void setDumpOptions(long dumpOptions) {
this.dumpOptions = dumpOptions;
}
public long getHeaderSize() {
return headerSize;
}
public void setHeaderSize(long headerSize) {
this.headerSize = headerSize;
}
public long getBitmapSize() {
return bitmapSize;
}
public void setBitmapSize(long bitmapSize) {
this.bitmapSize = bitmapSize;
}
public long getPages() {
return pages;
}
public void setPages(long pages) {
this.pages = pages;
}
public byte[] getBuffer() {
return buffer;
}
public void setBuffer(byte[] buffer) {
this.buffer = buffer;
}
public Integer PFN2RVA(Integer pfn) {
return pfnToRva.get(pfn);
}
public Set<Integer> pfnKeySet() {
return pfnToRva.keySet();
}
}

View File

@@ -0,0 +1,227 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class KldrDataTableEntry implements StructConverter {
public final static String NAME = "_KLDR_DATA_TABLE_ENTRY";
private long List_Flink;
private long List_Blink;
//private long __Undefined1;
//private long __Undefined2;
//private long __Undefined3;
private long NonPagedDebugInfo;
private long DllBase;
private long EntryPoint;
private int SizeOfImage;
private long FullDllName;
private long BaseDllName;
private int Flags;
private short LoadCount;
//private short __Undefined5;
//private long __Undefined6;
private int CheckSum;
//private int __padding1;
private int TimeDateStamp;
//private int __padding2;
private DumpFileReader reader;
private long index;
private int psz;
KldrDataTableEntry(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
this.psz = reader.getPointerSize();
parse();
}
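// parse() mirrors the component list in toDataType(): the readNext*() calls without a
// setter skip the __Undefined/padding members and the UNICODE_STRING length words that
// precede the FullDllName and BaseDllName buffer pointers.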
private void parse() throws IOException {
reader.setPointerIndex(getIndex());
setList_Flink(reader.readNextPointer());
setList_Blink(reader.readNextPointer());
reader.readNextPointer();
reader.readNextPointer();
reader.readNextPointer();
setNonPagedDebugInfo(reader.readNextPointer());
setDllBase(reader.readNextPointer());
setEntryPoint(reader.readNextPointer());
setSizeOfImage(reader.readNextInt());
reader.readNextInt();
reader.readNextPointer();
setFullDllName(reader.readNextPointer());
reader.readNextPointer();
setBaseDllName(reader.readNextPointer());
setFlags(reader.readNextInt());
setLoadCount(reader.readNextShort());
reader.readNextShort();
reader.readNextPointer();
setCheckSum(reader.readNextInt());
reader.readNextInt();
setTimeDateStamp(reader.readNextInt());
reader.readNextInt();
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(POINTER, psz, "List.Flink", null);
struct.add(POINTER, psz, "List.Blink", null);
struct.add(POINTER, psz, "__Undefined1", null);
struct.add(POINTER, psz, "__Undefined2", null);
struct.add(POINTER, psz, "__Undefined3", null);
struct.add(POINTER, psz, "NonPagedDebugInfo", null);
struct.add(POINTER, psz, "DllBase", null);
struct.add(POINTER, psz, "EntryPoint", null);
struct.add(DWORD, 4, "SizeOfImage", null);
struct.add(DWORD, 4, "", null);
struct.add(POINTER, psz, "FullDllNameLen", null);
struct.add(POINTER, psz, "FullDllName", null);
struct.add(POINTER, psz, "BaseDllNameLen", null);
struct.add(POINTER, psz, "BaseDllName", null);
struct.add(DWORD, 4, "Flags", null);
struct.add(WORD, 2, "LoadCount", null);
struct.add(WORD, 2, "__Undefined5", null);
struct.add(POINTER, psz, "__Undefined6", null);
struct.add(DWORD, 4, "CheckSum", null);
struct.add(DWORD, 4, "__padding1", null);
struct.add(DWORD, 4, "TimeDateStamp", null);
struct.add(DWORD, 4, "__padding2", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public long getList_Flink() {
return List_Flink;
}
public void setList_Flink(long list_Flink) {
List_Flink = list_Flink;
}
public long getList_Blink() {
return List_Blink;
}
public void setList_Blink(long list_Blink) {
List_Blink = list_Blink;
}
public long getDllBase() {
return DllBase;
}
public void setDllBase(long dllBase) {
DllBase = dllBase;
}
public long getEntryPoint() {
return EntryPoint;
}
public void setEntryPoint(long entryPoint) {
EntryPoint = entryPoint;
}
public int getSizeOfImage() {
return SizeOfImage;
}
public void setSizeOfImage(int sizeOfImage) {
SizeOfImage = sizeOfImage;
}
public long getFullDllName() {
return FullDllName;
}
public void setFullDllName(long fullDllName) {
FullDllName = fullDllName;
}
public long getBaseDllName() {
return BaseDllName;
}
public void setBaseDllName(long baseDllName) {
BaseDllName = baseDllName;
}
public int getFlags() {
return Flags;
}
public void setFlags(int flags) {
Flags = flags;
}
public short getLoadCount() {
return LoadCount;
}
public void setLoadCount(short loadCount) {
LoadCount = loadCount;
}
public int getCheckSum() {
return CheckSum;
}
public void setCheckSum(int checkSum) {
CheckSum = checkSum;
}
public int getTimeDateStamp() {
return TimeDateStamp;
}
public void setTimeDateStamp(int timeDateStamp) {
TimeDateStamp = timeDateStamp;
}
public long getNonPagedDebugInfo() {
return NonPagedDebugInfo;
}
public void setNonPagedDebugInfo(long nonPagedDebugInfo) {
NonPagedDebugInfo = nonPagedDebugInfo;
}
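// NB: getName() returns a hard-coded placeholder; the real module name is resolved
// elsewhere (e.g. Pagedump.walkPsLoadedModules reads it via getFullDllName()).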
public String getName() {
return "bob";
}
public long getIndex() {
return index;
}
}

View File

@@ -0,0 +1,187 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
public class LoadedDriver implements StructConverter {
public final static String NAME = "_KLDR_DATA_TABLE_ENTRY";
private int nameOffset;
private long dllBase;
private long entryPoint;
private long sizeOfImage;
private long fullDllName;
private long baseDllName;
private int flags;
private short loadCount;
private int checkSum;
private long buildFileHash;
private DumpFileReader reader;
private long index;
private int psz;
private boolean is32Bit;
LoadedDriver(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
this.psz = reader.getPointerSize();
this.is32Bit = psz == 4;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setNameOffset(reader.readNextInt());
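// The readNextAsciiString(res0len) call below is only used to advance the reader past
// the fields between NameOffset and DllBase (list links, exception table, etc.); the
// skip length differs between the 32-bit and 64-bit layouts.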
int res0len = is32Bit ? 24 : 52;
reader.readNextAsciiString(res0len);
setDllBase(reader.readNextPointer());
setEntryPoint(reader.readNextPointer());
setSizeOfImage(reader.readNextPointer());
reader.readNextPointer();
setFullDllName(reader.readNextPointer());
reader.readNextPointer();
setBaseDllName(reader.readNextPointer());
setFlags(reader.readNextInt());
setLoadCount(reader.readNextShort());
reader.readNextShort();
reader.readNextPointer();
setCheckSum(reader.readNextInt());
reader.readNextInt();
setBuildFileHash(reader.readNextInt());
reader.readNextInt();
}
@Override
public DataType toDataType() {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "NameOffset", null);
struct.add(DWORD, 4, "", null);
struct.add(POINTER, psz, "InLoadOrderLinks.Flink", null);
struct.add(POINTER, psz, "InLoadOrderLinks.Blink", null);
struct.add(POINTER, psz, "ExceptionTable", null);
struct.add(POINTER, psz, "ExceptionTableSize", null);
struct.add(POINTER, psz, "GpValue", null);
struct.add(POINTER, psz, "NonPagedDebugInfo", null);
struct.add(POINTER, psz, "DllBase", null);
struct.add(POINTER, psz, "EntryPoint", null);
struct.add(is32Bit ? DWORD : QWORD, psz, "SizeOfImage", null);
struct.add(is32Bit ? DWORD : QWORD, psz, "", null);
struct.add(POINTER, psz, "FullDllName", null);
struct.add(is32Bit ? DWORD : QWORD, psz, "", null);
struct.add(POINTER, psz, "BaseDllName", null);
struct.add(DWORD, 4, "Flags", null);
struct.add(WORD, 2, "LoadCount", null);
struct.add(WORD, 2, "", null);
struct.add(POINTER, psz, "SectionPointer", null);
struct.add(is32Bit ? DWORD : QWORD, psz, "CheckSum", null);
struct.add(is32Bit ? DWORD : QWORD, psz, "BuildFileHash", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public int getNameOffset() {
return nameOffset;
}
public void setNameOffset(int nameOffset) {
this.nameOffset = nameOffset;
}
public long getDllBase() {
return dllBase;
}
public void setDllBase(long dllBase) {
this.dllBase = dllBase;
}
public long getEntryPoint() {
return entryPoint;
}
public void setEntryPoint(long entryPoint) {
this.entryPoint = entryPoint;
}
public long getSizeOfImage() {
return sizeOfImage;
}
public void setSizeOfImage(long sizeOfImage) {
this.sizeOfImage = sizeOfImage;
}
public long getBuildFileHash() {
return buildFileHash;
}
public void setBuildFileHash(long buildFileHash) {
this.buildFileHash = buildFileHash;
}
public long getFullDllName() {
return fullDllName;
}
public void setFullDllName(long fullDllName) {
this.fullDllName = fullDllName;
}
public long getBaseDllName() {
return baseDllName;
}
public void setBaseDllName(long baseDllName) {
this.baseDllName = baseDllName;
}
public int getFlags() {
return flags;
}
public void setFlags(int flags) {
this.flags = flags;
}
public short getLoadCount() {
return loadCount;
}
public void setLoadCount(short loadCount) {
this.loadCount = loadCount;
}
public int getCheckSum() {
return checkSum;
}
public void setCheckSum(int checkSum) {
this.checkSum = checkSum;
}
}

View File

@@ -0,0 +1,119 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MmPfn implements StructConverter {
public final static String NAME = "PAGEDUMP_PHYS_MEM_RUN";
private long pteAddress;
private long origPte;
private long blink;
private long flags;
private int parent;
private DumpFileReader reader;
private long index;
private int psz;
MmPfn(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
this.psz = reader.getPointerSize();
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
reader.readNextPointer();
setPteAddress(reader.readNextPointer());
setOrigPte(reader.readNextPointer());
setBlink(reader.readNextPointer());
setFlags(reader.readNextPointer());
setParent(reader.readNextInt());
reader.readNextInt();
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, psz, "ListEntry", null);
struct.add(QWORD, psz, "PteAddress", null);
struct.add(QWORD, psz, "OriginalPte", null);
struct.add(QWORD, psz, "", null);
struct.add(QWORD, psz, "", null);
struct.add(DWORD, 4, "Parent", null);
struct.add(DWORD, 4, "", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public long getPteAddress() {
return pteAddress;
}
public void setPteAddress(long pteAddress) {
this.pteAddress = pteAddress;
}
public int getParent() {
return parent;
}
public void setParent(int parent) {
this.parent = parent;
}
public long getOrigPte() {
return origPte;
}
public void setOrigPte(long origPte) {
this.origPte = origPte;
}
public long getBlink() {
return blink;
}
public void setBlink(long blink) {
this.blink = blink;
}
public long getFlags() {
return flags;
}
public void setFlags(long flags) {
this.flags = flags;
}
}

View File

@@ -0,0 +1,609 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.File;
import java.io.IOException;
import java.util.*;
import ghidra.app.util.Option;
import ghidra.app.util.OptionUtils;
import ghidra.app.util.bin.StructConverter;
import ghidra.app.util.bin.format.pdb2.pdbreader.*;
import ghidra.app.util.importer.MessageLog;
import ghidra.app.util.opinion.PeLoader;
import ghidra.app.util.pdb.pdbapplicator.*;
import ghidra.file.formats.dump.*;
import ghidra.file.formats.dump.cmd.ModuleToPeHelper;
import ghidra.framework.options.Options;
import ghidra.program.model.data.*;
import ghidra.program.model.listing.Program;
import ghidra.util.Msg;
import ghidra.util.exception.CancelledException;
import ghidra.util.exception.DuplicateNameException;
import ghidra.util.task.TaskMonitor;
public class Pagedump extends DumpFile {
public static long ETHREAD_PID_OFFSET; //TODO: Where do we want to get these from?
public static long ETHREAD_TID_OFFSET;
public final static int OFFSET_HEADER = 0x0;
public final static int OFFSET_TRIAGE = 0x1000;
public final static int DUMP_TYPE_UNKNOWN = 0x0;
public final static int DUMP_TYPE_FULL = 0x1;
public final static int DUMP_TYPE_SUMMARY = 0x2;
public final static int DUMP_TYPE_HEADER = 0x3;
public final static int DUMP_TYPE_TRIAGE = 0x4;
public final static int DUMP_TYPE_BITMAP_FULL = 0x5;
public final static int DUMP_TYPE_BITMAP_KERNEL = 0x6;
public final static int DUMP_TYPE_AUTOMATIC = 0x7;
public static final int SIGNATURE = 0x45474150; // "PAGE"
public static final int SIG_FULL = 0x504D4446; // "FDMP"
public static final int SIG_SUMMARY = 0x504D4453; // "SDMP"
public static final int SIG_VALID1 = 0x504D5544; // "DUMP"
public static final int SIG_VALID2 = 0x34365544; // "DU64"
public static final int PAGE_SIZE = 0x1000;
public static final int MACHINE_TYPE_OFFSET32 = 0x20;
public static final int MACHINE_TYPE_OFFSET64 = 0x30;
public final static int TRIAGE_DUMP_CONTEXT = 0x1;
public final static int TRIAGE_DUMP_EXCEPTION = 0x2;
public final static int TRIAGE_DUMP_PRCB = 0x4;
public final static int TRIAGE_DUMP_PROCESS = 0x8;
public final static int TRIAGE_DUMP_THREAD = 0x10;
public final static int TRIAGE_DUMP_STACK = 0x20;
public final static int TRIAGE_DUMP_DRIVER_LIST = 0x40;
public final static int TRIAGE_DUMP_BROKEN_DRIVER = 0x80;
public final static int TRIAGE_DUMP_BASIC_INFO = 0xFF;
public final static int TRIAGE_DUMP_MMINFO = 0x100;
public final static int TRIAGE_DUMP_DATAPAGE = 0x200;
public final static int TRIAGE_DUMP_DEBUGGER_DATA = 0x400;
public final static int TRIAGE_DUMP_DATA_BLOCKS = 0x800;
PagedumpFileHeader header;
TriageDump triage;
SummaryHeader summary;
FullDumpHeader full;
private CategoryPath categoryPath = new CategoryPath("/ntkrnlmp.pdb");
private List<String> addins = new ArrayList<>();
private int base;
private long pfnDB;
Map<Integer, Long> pfnToVA = new HashMap<>();
//Map<Integer, Long> pfnToVAL = new HashMap<>();
Map<Long, Integer> VA2fileOffset = new HashMap<>();
protected long cr3;
private boolean createBlocks = true;
private boolean is32Bit = false;
private boolean isPAE = false;
public Pagedump(DumpFileReader reader, ProgramBasedDataTypeManager dtm, List<Option> options,
TaskMonitor monitor) throws IOException {
super(reader, dtm, options, monitor);
addins.add("ntoskrnl");
addins.add("ntkrnlmp");
Options props = program.getOptions(Program.PROGRAM_INFO);
props.setString("Executable Format", PeLoader.PE_NAME);
initManagerList(addins);
createBlocks =
OptionUtils.getBooleanOptionValue(DumpFileLoader.CREATE_MEMORY_BLOCKS_OPTION_NAME,
options, DumpFileLoader.CREATE_MEMORY_BLOCKS_OPTION_DEFAULT);
String pdbLocation =
OptionUtils.getOption(DumpFileLoader.DEBUG_DATA_PATH_OPTION_NAME, options,
DumpFileLoader.DEBUG_DATA_PATH_OPTION_DEFAULT);
if (!pdbLocation.equals("")) {
loadKernelPDB(pdbLocation, monitor);
}
header = new PagedumpFileHeader(reader, 0L, this);
cr3 = header.getDirectoryTableBase();
is32Bit = header.is32Bit();
isPAE = header.getPaeEnabled() != 0;
int hdrLen = header.toDataType().getLength();
addInteriorAddressObject("DumpHeader", 0, 0L, hdrLen);
data.add(new DumpData(0, header.toDataType()));
PhysicalMemoryDescriptor pmd = header.getPhysicalMemoryBlockBuffer();
if (pmd != null) {
loadPhysicalMemoryRuns(pmd);
}
DataType dt;
switch (header.getDumpType()) {
case DUMP_TYPE_FULL:
case DUMP_TYPE_BITMAP_FULL:
case DUMP_TYPE_BITMAP_KERNEL:
int signature = reader.readInt(hdrLen);
int offset = hdrLen;
switch (signature) {
case SIG_SUMMARY:
case SIG_FULL:
full = new FullDumpHeader(reader, hdrLen);
dt = full.toDataType();
data.add(new DumpData(hdrLen, dt));
data.add(new DumpData(full.getHeaderSize(), "Physical_Memory", 0));
offset = (int) full.getHeaderSize();
addInteriorAddressObject("DumpHeader", hdrLen, hdrLen,
offset - hdrLen);
if (createBlocks) {
mapPages(monitor);
}
walkPsLoadedModules();
break;
case SIG_VALID1:
reader.readNextInt();
break;
}
addInteriorAddressObject("Unknown", offset, offset,
reader.length() - offset);
break;
case DUMP_TYPE_TRIAGE:
triage = new TriageDump(reader, hdrLen);
dt = triage.toDataType();
data.add(new DumpData(hdrLen, dt));
addInteriorAddressObject("DumpHeader", hdrLen, hdrLen,
triage.getSizeOfDump());
int next = hdrLen + triage.getSizeOfDump();
addInteriorAddressObject("Unknown", next,
next, reader.length() - next);
buildKernelStructures();
break;
}
}
private void loadKernelPDB(String pdbLocation, TaskMonitor monitor) {
for (String key : addins) {
if (managerList.containsKey(key)) {
return;
}
}
File pdbFile = new File(pdbLocation);
if (!pdbFile.exists()) {
return;
}
PdbReaderOptions readerOptions = new PdbReaderOptions();
PdbApplicatorOptions applicatorOptions = new PdbApplicatorOptions();
applicatorOptions.setProcessingControl(PdbApplicatorControl.DATA_TYPES_ONLY);
try (AbstractPdb pdb = PdbParser.parse(pdbFile.getPath(), readerOptions, monitor)) {
monitor.setMessage("PDB: Parsing " + pdbFile + "...");
pdb.deserialize(monitor);
PdbApplicator applicator = new PdbApplicator(pdbFile.getPath(), pdb);
applicator.applyTo(program, dtm, program.getImageBase(),
applicatorOptions, monitor,
(MessageLog) null);
}
catch (PdbException | IOException | CancelledException e) {
Msg.error(this, e.getMessage());
}
}
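// Each physical memory run describes PageCount contiguous page frames starting at
// BasePage; their contents are stored back-to-back in the file after the header.
// 'total' tracks the running page count (i.e. the file position in pages), and the
// final run is clamped if it would extend past the end of the file.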
private void loadPhysicalMemoryRuns(PhysicalMemoryDescriptor pmd) throws IOException {
PhysicalMemoryRun[] runs = pmd.getRuns();
int total = 1;
for (PhysicalMemoryRun run : runs) {
long runLength = run.getPageCount() * PAGE_SIZE;
boolean outOfBounds = runLength + total * PAGE_SIZE > reader.length();
long bound = (outOfBounds) ? (reader.length() - total * PAGE_SIZE) : runLength;
ArrayDataType adt =
new ArrayDataType(StructConverter.BYTE, (int) bound, 1);
data.add(new DumpData(total * PAGE_SIZE, adt));
// NB: Not sure if or where to place these
//addInteriorAddressObject(DumpFileLoader.LOCAL, total * PAGE_SIZE,
// run.getBasePage() * PAGE_SIZE, run.getPageCount() * PAGE_SIZE);
total += run.getPageCount();
if (outOfBounds)
break;
}
}
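// Triage dumps carry optional sections advertised by the MiniDumpFields bit mask
// (TRIAGE_DUMP_*); each block below decodes one section at the offset recorded in the
// triage header.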
private void buildKernelStructures() throws IOException {
long offset;
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_MMINFO) > 0) {
offset = triage.getMmOffset();
reader.setPointerIndex(offset);
TriageStorage tstor = new TriageStorage(reader, reader.getPointerIndex());
data.add(new DumpData(offset, tstor.toDataType()));
}
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_PRCB) > 0) {
addDumpData(triage.getPrcbOffset(), "_KPRCB", categoryPath);
}
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_PROCESS) > 0) {
addDumpData(triage.getProcessOffset(), "_EPROCESS", categoryPath);
/*
processId = reader.readInt(triage.getThreadOffset() + ETHREAD_PID_OFFSET);
if (processId < 0)
processId = 0;
addProcess(processId, "TARGET", 0);
*/
}
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_THREAD) > 0) {
addDumpData(triage.getThreadOffset(), "_ETHREAD", categoryPath);
/*
threadId = reader.readInt(triage.getThreadOffset() + ETHREAD_TID_OFFSET);
if (threadId < 0)
threadId = 0;
addThread(processId, threadId, 0);
*/
}
ArrayDataType dt;
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_STACK) > 0) {
int psz = reader.getPointerSize();
offset = triage.getCallStackOffset();
DataType el = psz == 4 ? StructConverter.DWORD : StructConverter.QWORD;
dt = new ArrayDataType(el, triage.getCallStackSize() / psz, psz);
data.add(new DumpData(offset, dt, "CALL_STACK"));
}
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_DRIVER_LIST) > 0) {
offset = triage.getDriverListOffset();
reader.setPointerIndex(offset);
if (triage.getDriverCount() > 0) {
DataType ldt = null;
for (int i = 0; i < triage.getDriverCount(); i++) {
LoadedDriver ld = new LoadedDriver(reader, reader.getPointerIndex());
ldt = ld.toDataType();
int nameOffset = ld.getNameOffset();
String name = reader.readUnicodeString(nameOffset + 4);
addModule(name, ld.getDllBase(), i, ld.getSizeOfImage());
addExteriorAddressObject(name, 0, ld.getDllBase(), ld.getSizeOfImage());
}
dt = new ArrayDataType(ldt, triage.getDriverCount(), ldt.getLength());
data.add(new DumpData(offset, dt, "LOADED_DRIVERS"));
}
else {
data.add(new DumpData(offset, "LOADED_DRIVERS", 0));
}
}
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_BROKEN_DRIVER) > 0) {
offset = triage.getUnloadedDriversOffset();
reader.setPointerIndex(offset);
long count = reader.readNextLong();
StructureDataType uds = new StructureDataType("UNLOADED_DRIVERS", 0);
uds.add(StructConverter.QWORD, 8, "NumberOfUnloadedDrivers", null);
if (count > 0) {
DataType udt = null;
for (int i = 0; i < count; i++) {
UnloadedDriver ud = new UnloadedDriver(reader, reader.getPointerIndex());
udt = ud.toDataType();
if (ud.getStartAddress() != 0) {
addExteriorAddressObject(ud.getName(), 0, ud.getStartAddress(),
ud.getSize());
}
}
uds.add(new ArrayDataType(udt, (int) count, udt.getLength()),
udt.getLength() * (int) count,
"UnloadedDrivers", null);
}
data.add(new DumpData(offset, uds));
}
offset = triage.getStringPoolOffset();
long end = offset + triage.getStringPoolSize();
data.add(new DumpData(offset, "STRING_POOL", triage.getStringPoolSize()));
while (offset < end) {
int len = reader.readInt(offset);
data.add(new DumpData(offset, StructConverter.DWORD, "", false, false));
if (len == 0 || len == 0xFFFFFFFF)
break;
offset += 4;
DumpData dd = new DumpData(offset, new TerminatedUnicodeDataType(), "", false, false);
dd.setSize(len * 2 + 2);
data.add(dd);
offset = (offset + dd.getSize() + 7) / 8 * 8;
}
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_DEBUGGER_DATA) > 0) {
//addDumpData(triage.getDebuggerDataOffset(), "_KDDEBUGGER_DATA64", categoryPath);
offset = triage.getDebuggerDataOffset();
reader.setPointerIndex(offset);
KdDebuggerData kdd = new KdDebuggerData(reader, reader.getPointerIndex());
data.add(new DumpData(offset, kdd.toDataType()));
}
if (createBlocks && (header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_DATA_BLOCKS) > 0) {
offset = triage.getDataBlocksOffset();
reader.setPointerIndex(offset);
DataType db = null;
for (int i = 0; i < triage.getDataBlocksCount(); i++) {
TriageDataBlock tdb = new TriageDataBlock(reader, reader.getPointerIndex());
addInteriorAddressObject(DumpFileLoader.MEMORY, tdb.getOffset(),
tdb.getAddress(), tdb.getSize());
VA2fileOffset.put(tdb.getAddress(), tdb.getOffset());
db = tdb.toDataType();
}
if (db != null) {
if (triage.getDataBlocksCount() > 0) {
dt = new ArrayDataType(db, triage.getDataBlocksCount(), db.getLength());
data.add(new DumpData(offset, dt, "DATA_BLOCKS"));
}
}
}
if ((header.getMiniDumpFields() & Pagedump.TRIAGE_DUMP_CONTEXT) > 0) {
if (header.getContextOffset() > 0) {
CategoryPath path = new CategoryPath("/winnt.h");
DataType ctxt = getTypeFromArchive(path, "CONTEXT");
if (ctxt != null) {
setProgramContext(header.getContextOffset(), ctxt, "(active)");
}
}
}
}
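// Walks the doubly-linked PsLoadedModuleList of KLDR_DATA_TABLE_ENTRY records and
// adds an exterior memory object per loaded module. The visited-link list
// (entryKeys) guards against cycling back around the list.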
private void walkPsLoadedModules() {
long listHead = header.getPsLoadedModuleList();
try {
long next = reader.readPointer(virtualToRva(listHead));
reader.setPointerIndex(virtualToRva(next));
List<Long> entryKeys = new ArrayList<>();
while (true) {
KldrDataTableEntry entry = new KldrDataTableEntry(reader, reader.getPointerIndex());
data.add(new DumpData(next, entry.toDataType()));
long namePtr = entry.getFullDllName();
if (namePtr != 0) {
long fileOffset = virtualToRva(namePtr);
String name = reader.readUnicodeString(fileOffset);
addExteriorAddressObject(name, 0, entry.getDllBase(),
entry.getSizeOfImage());
}
next = entry.getList_Flink();
if (entryKeys.contains(next)) {
break;
}
entryKeys.add(next);
reader.setPointerIndex(virtualToRva(next));
}
}
catch (IOException e) {
Msg.error(this, e.getMessage());
}
catch (DuplicateNameException e) {
Msg.error(this, "Duplicate name");
}
}
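// Maps every physical page recorded in the PFN database into the program
// (presumably only reached for full, non-triage dumps): each page frame number is
// translated to its file offset and attached to the virtual address recovered by
// walkPfnDB().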
private void mapPages(TaskMonitor monitor) throws IOException {
base = (int) full.getHeaderSize();
walkPfnDB();
//monitor.setMessage("Walking page tables");
//monitor.initialize(512);
//walkPages((int) (cr3 >> 12), 0L, 0, false);
monitor.setMessage("Adding pages");
monitor.initialize(pfnToVA.keySet().size());
int count = 0;
for (Integer pfnx : pfnToVA.keySet()) {
Integer rva = full.PFN2RVA(pfnx);
if (rva == null) {
Msg.error(this, "no rva for " + Long.toHexString(pfnx));
continue;
}
Long addr = pfnToVA.get(pfnx);
addInteriorAddressObject(DumpFileLoader.MEMORY, fileOffset(pfnx), addr, 0x1000);
monitor.setProgress(count++);
}
/*
monitor.setMessage("Adding 1M pages");
monitor.initialize(pfnToVAL.keySet().size());
count = 0;
for (Integer pfnx : pfnToVAL.keySet()) {
Integer rva = full.PFN2RVA(pfnx);
if (rva == null) {
Msg.error(this, "no rva for " + Long.toHexString(pfnx));
continue;
}
Long addr = pfnToVAL.get(pfnx);
addInteriorAddressObject(DumpFileLoader.LOCAL, fileOffset(pfnx), addr, 0x100000);
monitor.setProgress(count++);
}
*/
monitor.setMessage("Pages added");
}
public PagedumpFileHeader getFileHeader() {
return header;
}
public TriageDump getTriageDump() {
return triage;
}
public boolean usesPreloadedLists() {
return header.getDumpType() != DUMP_TYPE_FULL;
}
public static String getMachineType(DumpFileReader reader) throws IOException {
PagedumpFileHeader header = new PagedumpFileHeader(reader, 0L);
return Integer.toHexString(header.getMachineImageType());
}
public void analyze(TaskMonitor monitor) {
boolean analyzeEmbeddedObjects =
OptionUtils.getBooleanOptionValue(DumpFileLoader.ANALYZE_EMBEDDED_OBJECTS_OPTION_NAME,
options,
false);
if (analyzeEmbeddedObjects) {
ModuleToPeHelper.queryModules(program, monitor);
}
}
private long valueAt(long l) {
try {
return reader.readLong(l);
}
catch (IOException e) {
Msg.error(this, e.getMessage());
return -1;
}
}
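// Scans the PFN database (one 0x30-byte _MMPFN entry per page frame) and
// reconstructs each page's virtual address from its PteAddress field, assuming the
// standard x64 self-mapped PTE layout (VA = PteAddress << 9 with the upper bits
// forced to 0xFFFF...).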
private void walkPfnDB() throws IOException {
pfnDB = header.getPfnTableBase();
for (Integer pfn : full.pfnKeySet()) {
long toRead = pfnDB + pfn * 0x30;
long rva = virtualToRva(toRead);
if (rva < 0) {
continue;
}
MmPfn pfnEntry = new MmPfn(reader, rva);
long pte = pfnEntry.getPteAddress();
long addr = (pte << 9) | 0xFFFF000000000000L;
pfnToVA.put(pfn, addr);
}
}
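// Translates a virtual address to a file offset. Triage dumps use the table built
// from their data blocks; otherwise the page tables rooted at CR3 are walked
// (PXE -> PPE -> PDE -> PTE, or two 10-bit levels for non-PAE x86), short-circuiting
// at the PDE when the large-page bit (0x80) is set.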
private long virtualToRva(long vaddr) {
if (triage != null) {
return VA2fileOffset.get(vaddr);
}
int tableHead = (int) (cr3 >> 12);
int shiftPTE = 12;
int shiftPDE = (is32Bit && !isPAE) ? shiftPTE + 10 : shiftPTE + 9;
int shiftPPE = (is32Bit && !isPAE) ? shiftPDE + 10 : shiftPDE + 9;
int shiftPXE = shiftPPE + 9;
int mask = (is32Bit && !isPAE) ? 0x3FF : 0x1FF;
long index = vaddr & 0xFFF;
long pte = (vaddr >> shiftPTE) & mask;
long pde = (vaddr >> shiftPDE) & mask;
long ppe = (vaddr >> shiftPPE) & mask;
long pxe = (vaddr >> shiftPXE) & mask;
int offpte = (int) (pte * 8);
int offpde = (int) (pde * 8);
int offppe = (int) (ppe * 8);
int offpxe = (int) (pxe * 8);
long valPXE = valueAt(fileOffset(tableHead) + offpxe);
int pfnPXE = valueToPfn(valPXE);
long rvaPXE = fileOffset(pfnPXE);
long valPPE = valueAt(rvaPXE + offppe);
int pfnPPE = valueToPfn(valPPE);
long rvaPPE = fileOffset(pfnPPE);
long valPDE = valueAt(rvaPPE + offpde);
int pfnPDE = valueToPfn(valPDE);
int flagsPDE = valueToFlags(valPDE);
long rvaPDE = fileOffset(pfnPDE);
boolean isLargePage = isLargePage(flagsPDE);
if (isLargePage) {
index = vaddr & 0x1FFFFF;
return rvaPDE + index;
}
long valPTE = valueAt(rvaPDE + offpte);
int pfnPTE = valueToPfn(valPTE);
long rvaPTE = fileOffset(pfnPTE);
return rvaPTE + index;
}
private long fileOffset(int pfn) {
Integer val = full.PFN2RVA(pfn);
if (val == null) {
return -1;
}
return ((long) val) * 0x1000 + base;
}
private int valueToPfn(long pfnEntry) {
return (int) ((pfnEntry >> 12) & 0xFFFFFFFF);
}
private int valueToFlags(long pfnEntry) {
return (int) (pfnEntry & 0xFFF);
}
private boolean isLargePage(int flags) {
return (flags & 0x80) > 0;
}
/*
private boolean isValid(int flags) {
return (flags & 0x1) > 0;
}
private void walkPages(int page, long va, int depth, boolean lp) throws IOException {
long fileOffset = fileOffset(page);
if (fileOffset < 0) {
return;
}
if (lp && depth == 3) {
long vai = va << 12;
vai |= 0xFFFF000000000000L;
pfnToVAL.put(page, vai);
return;
}
for (int i = 0; i < 0x200; i++) {
if (depth == 0)
monitor.setProgress(i);
long entry = reader.readLong(fileOffset + i * 8);
int pfn = valueToPfn(entry);
int flags = valueToFlags(entry);
boolean valid = isValid(flags);
boolean largePage = isLargePage(flags);
if (valid) {
long vai = (va | i) << 9;
if (depth < 3) {
walkPages(pfn, vai, depth + 1, largePage);
}
else {
Long rva = fileOffset(pfn);
if (rva > 0) {
if (!isLargePage(flags)) {
vai = vai << 3;
vai |= 0xFFFF000000000000L;
pfnToVA.put(pfn, vai);
}
else {
vai = vai << 12;
vai |= 0xFFFF000000000000L;
pfnToVAL.put(pfn, vai);
}
}
}
}
}
}
*/
}

View File

@@ -0,0 +1,636 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import generic.stl.Pair;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
public class PagedumpFileHeader implements StructConverter {
public final static String NAME = "PAGEDUMP_HEADER";
public final static int HEADER_SIZE = 0x1000;
public final static int DMP_PHYSICAL_MEMORY_BLOCK_SIZE_32 = 700;
public final static int DMP_PHYSICAL_MEMORY_BLOCK_SIZE_64 = 700;
public final static int DMP_CONTEXT_RECORD_SIZE_32 = 1200;
public final static int DMP_CONTEXT_RECORD_SIZE_64 = 3000;
public final static int DMP_HEADER_COMMENT_SIZE = 128;
public final static int DMP_RESERVED_0_SIZE_32 = 1760;
public final static int DMP_RESERVED_2_SIZE_32 = 16;
public final static int DMP_RESERVED_3_SIZE_32 = 56;
public final static int DMP_RESERVED_0_SIZE_64 = 4008;
public static int OFFSET_DUMP_TYPE = 0xF88;
private int signature;
private int validDump;
private int majorVersion;
private int minorVersion;
private long directoryTableBase;
private long pfnTableBase;
private long psLoadedModuleList;
private long psActiveProcessHead;
private int machineImageType;
private int numberOfProcessors;
private int bugCheckCode;
private long bugCheckParameter1;
private long bugCheckParameter2;
private long bugCheckParameter3;
private long bugCheckParameter4;
private byte[] versionUser = new byte[0x20];
private long kdDebuggerDataBlock;
protected PhysicalMemoryDescriptor pmd;
private int dumpType;
private int miniDumpFields;
private int secondaryDataState;
private int productType;
private int suiteMask;
private int writerStatus;
private int paeEnabled;
private int kdSecondaryVersion;
private int attributes;
private int bootId;
private long requiredDumpSpace;
private long systemUpTime;
private long systemTime;
protected List<Pair<Integer, DataType>> delayedAdds = new ArrayList<Pair<Integer, DataType>>();
protected DumpFileReader reader;
protected long index;
private Pagedump pd;
private int psz;
private int pad = Pagedump.SIGNATURE; // "PAGE"
private int padSize = 4;
private boolean is32Bit;
private long contextOffset;
PagedumpFileHeader(DumpFileReader reader, long index, Pagedump pd) throws IOException {
this.reader = reader;
this.index = index;
this.pd = pd;
this.psz = reader.getPointerSize();
is32Bit = psz == 4;
parse();
}
PagedumpFileHeader(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parseLight();
}
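// parseLight() reads just enough of the header to identify the dump: the ValidDump
// tag selects the pointer size (32- vs 64-bit), after which the machine image type
// can be read for loader queries such as getMachineType().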
private void parseLight() throws IOException {
reader.setPointerIndex(index);
setSignature(reader.readNextInt());
setValidDump(reader.readNextInt());
int valid = getValidDump();
psz = (valid == Pagedump.SIG_VALID1) ? 32 : 64;
reader.setPointerSize(psz);
setMajorVersion(reader.readNextInt());
setMinorVersion(reader.readNextInt());
setDirectoryTableBase(reader.readNextPointer());
setPfnTableBase(reader.readNextPointer());
setPsLoadedModuleList(reader.readNextPointer());
setPsActiveProcessHead(reader.readNextPointer());
setMachineImageType(reader.readNextInt());
}
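// Full header parse. The fixed-size physical memory block, context record and
// exception record regions are skipped by size, and the trailing fields are only
// recorded when they differ from the "PAGE" fill value used to pad unused header
// space.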
private void parse() throws IOException {
reader.setPointerIndex(index);
setSignature(reader.readNextInt());
setValidDump(reader.readNextInt());
setMajorVersion(reader.readNextInt());
setMinorVersion(reader.readNextInt());
setDirectoryTableBase(reader.readNextPointer());
setPfnTableBase(reader.readNextPointer());
setPsLoadedModuleList(reader.readNextPointer());
setPsActiveProcessHead(reader.readNextPointer());
setMachineImageType(reader.readNextInt());
setNumberOfProcessors(reader.readNextInt());
setBugCheckCode(reader.readNextInt());
reader.readNextInt();
setBugCheckParameter1(reader.readNextPointer());
setBugCheckParameter2(reader.readNextPointer());
setBugCheckParameter3(reader.readNextPointer());
setBugCheckParameter4(reader.readNextPointer());
for (int i = 0; i < versionUser.length; i++) {
versionUser[i] = reader.readNextByte();
}
if (is32Bit()) {
setPaeEnabled(reader.readNextByte());
setKdSecondaryVersion(reader.readNextByte());
reader.readNextByte();
reader.readNextByte();
}
setKdDebuggerDataBlock(reader.readNextPointer());
long offsetPMD = reader.getPointerIndex();
long pmdSize =
is32Bit() ? DMP_PHYSICAL_MEMORY_BLOCK_SIZE_32 : DMP_PHYSICAL_MEMORY_BLOCK_SIZE_64;
long ctxtSize = is32Bit() ? DMP_CONTEXT_RECORD_SIZE_32 : DMP_CONTEXT_RECORD_SIZE_64;
long offset = offsetPMD + pmdSize + padSize + ctxtSize;
CategoryPath path = new CategoryPath("/winnt.h");
DataType dt =
pd.getTypeFromArchive(path, is32Bit() ? "EXCEPTION_RECORD32" : "EXCEPTION_RECORD64");
if (dt != null) {
offset += dt.getLength();
}
else {
offset += is32Bit() ? 0x54 : 0x98; // ExceptionRecord
}
if (is32Bit()) {
offset += DMP_HEADER_COMMENT_SIZE;
reader.setPointerIndex(offset);
int val = reader.readNextInt();
if (val != pad) {
setAttributes(val);
}
val = reader.readNextInt();
if (val != pad) {
setAttributes(val);
}
offset = reader.getPointerIndex();
offset += DMP_CONTEXT_RECORD_SIZE_32;
}
reader.setPointerIndex(offset);
OFFSET_DUMP_TYPE = (int) offset;
setDumpType(reader.readNextInt());
reader.readNextInt(); // pad
if (!is32Bit()) {
setRequiredDumpSpace(reader.readNextLong());
setSystemTime(reader.readNextLong());
for (int i = 0; i < DMP_HEADER_COMMENT_SIZE; i++) {
reader.readNextByte();
}
setSystemUpTime(reader.readNextLong());
}
int val = reader.readNextInt();
if (val != pad) {
setMiniDumpFields(val);
}
val = reader.readNextInt();
if (val != pad) {
setSecondaryDataState(val);
}
val = reader.readNextInt();
if (val != pad) {
setProductType(val);
}
val = reader.readNextInt();
if (val != pad) {
setSuiteMask(val);
}
val = reader.readNextInt();
if (val != pad) {
setWriterStatus(val);
}
if (is32Bit()) {
setRequiredDumpSpace(reader.readNextLong());
for (int i = 0; i < DMP_RESERVED_0_SIZE_32; i++) {
reader.readNextByte();
}
setSystemUpTime(reader.readNextLong());
setSystemTime(reader.readNextLong());
for (int i = 0; i < DMP_RESERVED_0_SIZE_32; i++) {
reader.readNextByte();
}
}
else {
val = reader.readNextInt();
if (val != pad) {
setKdSecondaryVersion(val);
}
val = reader.readNextInt();
if (val != pad) {
setAttributes(val);
}
val = reader.readNextInt();
if (val != pad) {
setBootId(val);
}
for (int i = 0; i < DMP_RESERVED_0_SIZE_64; i++) {
reader.readNextByte();
}
}
if (dumpType != Pagedump.DUMP_TYPE_TRIAGE ||
(miniDumpFields & Pagedump.TRIAGE_DUMP_DATA_BLOCKS) > 0) {
val = reader.readInt(offsetPMD);
if (val != pad) {
pmd = new PhysicalMemoryDescriptor(reader, offsetPMD);
}
}
}
public long getContextOffset() {
return contextOffset;
}
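// Builds the PAGEDUMP_HEADER structure. Fields left at the "PAGE" fill value are
// rendered as 4-character strings rather than DWORDs, and the structure is grown
// out to the DumpType offset discovered during parse().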
@Override
public DataType toDataType() {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(STRING, 4, "Signature", null);
struct.add(STRING, 4, "ValidDump", null);
struct.add(DWORD, 4, "MajorVersion", null);
struct.add(DWORD, 4, "MinorVersion", null);
struct.add(POINTER, psz, "DirectoryTableBase", null);
struct.add(POINTER, psz, "PfnTableBase", null);
struct.add(POINTER, psz, "PsLoadedModuleList", null);
struct.add(POINTER, psz, "PsActiveProcessHead", null);
struct.add(DWORD, 4, "MachineImageType", null);
struct.add(DWORD, 4, "NumberOfProcessors", null);
struct.add(DWORD, 4, "BugCheckCode", null);
struct.add(STRING, 4, "__unusedAlignment", null);
struct.add(POINTER, psz, "BugCheckParameter1", null);
struct.add(POINTER, psz, "BugCheckParameter2", null);
struct.add(POINTER, psz, "BugCheckParameter3", null);
struct.add(POINTER, psz, "BugCheckParameter4", null);
struct.add(STRING, versionUser.length, "VersionUser", null);
if (is32Bit()) {
struct.add(BYTE, 1, "PaeEnabled", null);
struct.add(BYTE, 1, "KdSecondaryVersion", null);
struct.add(BYTE, 1, "", null);
struct.add(BYTE, 1, "", null);
}
struct.add(POINTER, psz, "KdDebuggerDataBlock", null);
long pmdSize =
is32Bit() ? DMP_PHYSICAL_MEMORY_BLOCK_SIZE_32 : DMP_PHYSICAL_MEMORY_BLOCK_SIZE_64;
struct.add(STRING, (int) pmdSize, "PhysicalMemoryBlock", null);
struct.add(STRING, 4, "__unusedAlignment", null);
CategoryPath path = new CategoryPath("/winnt.h");
long ctxtSize = is32Bit() ? DMP_CONTEXT_RECORD_SIZE_32 : DMP_CONTEXT_RECORD_SIZE_64;
DataType dt = pd.getTypeFromArchive(path, "CONTEXT");
if (dt != null) {
contextOffset = struct.getLength();
struct.add(dt, dt.getLength(), "ContextRecord", null);
struct.add(STRING, (int) ctxtSize - dt.getLength(), "__unusedAlignment", null);
}
dt = pd.getTypeFromArchive(path, is32Bit() ? "EXCEPTION_RECORD32" : "EXCEPTION_RECORD64");
if (dt != null) {
struct.add(dt, dt.getLength(), "ExceptionRecord", null);
}
else {
struct.add(DWORD, 4, "ExceptionCode", null);
struct.add(DWORD, 4, "ExceptionFlags", null);
struct.add(POINTER, psz, "ExceptionRecord", null);
struct.add(POINTER, psz, "ExceptionAddress", null);
struct.add(DWORD, 4, "NumberParameters", null);
struct.add(STRING, 4, "__unusedAlignment", null);
ArrayDataType eiDt = new ArrayDataType(POINTER, 15, psz);
struct.add(eiDt, eiDt.getLength(), "ExceptionInformation", null);
}
if (is32Bit()) {
struct.add(STRING, DMP_HEADER_COMMENT_SIZE, "Comment", null);
dt = getAttributes() == 0 ? STRING : DWORD;
struct.add(QWORD, 8, "Attributes", null);
dt = getBootId() == 0 ? STRING : DWORD;
struct.add(DWORD, 4, "BootId", null);
struct.add(STRING, DMP_RESERVED_0_SIZE_32, "_reserved0", null);
}
struct.growStructure(OFFSET_DUMP_TYPE - struct.getLength());
struct.add(DWORD, 4, "DumpType", null);
struct.add(STRING, 4, "__unusedAlignment", null);
if (!is32Bit()) {
struct.add(QWORD, 8, "RequiredDumpSpace", null);
struct.add(QWORD, 8, "SystemTime", null);
struct.add(STRING, DMP_HEADER_COMMENT_SIZE, "Comment", null);
struct.add(QWORD, 8, "SystemUpTime", null);
}
dt = miniDumpFields == 0 ? STRING : DWORD;
struct.add(dt, 4, "MiniDumpFields", null);
dt = secondaryDataState == 0 ? STRING : DWORD;
struct.add(dt, 4, "SecondaryDataState", null);
dt = productType == 0 ? STRING : DWORD;
struct.add(dt, 4, "ProductType", null);
dt = suiteMask == 0 ? STRING : DWORD;
struct.add(dt, 4, "SuiteMask", null);
dt = getWriterStatus() == 0 ? STRING : DWORD;
struct.add(dt, 4, "WriterStatus", null);
if (is32Bit()) {
struct.add(QWORD, 8, "RequiredDumpSpace", null);
struct.add(STRING, DMP_RESERVED_2_SIZE_32, "_reserved2", null);
struct.add(QWORD, 8, "SystemUpTime", null);
struct.add(QWORD, 8, "SystemTime", null);
struct.add(STRING, DMP_RESERVED_3_SIZE_32, "_reserved3", null);
}
else {
dt = getKdSecondaryVersion() == 0 ? STRING : DWORD;
struct.add(dt, 4, "KdSecondaryVersion", null);
dt = getAttributes() == 0 ? STRING : DWORD;
struct.add(dt, 4, "Attributes", null);
dt = getBootId() == 0 ? STRING : DWORD;
struct.add(dt, 4, "BootId", null);
struct.add(STRING, DMP_RESERVED_0_SIZE_64, "_reserved0", null);
}
//replace(struct, BYTE, OFFSET_PAE_ENABLED, "PaeEnabled");
//if ((miniDumpFields & Pagedump.TRIAGE_DUMP_DATA_BLOCKS) > 0 && pmd.getNumberOfRuns() > 0) {
/*
struct.growStructure(HEADER_SIZE - struct.getLength());
if (pmd != null && pmd.getNumberOfRuns() > 0) {
replace(struct, pmd.toDataType(), OFFSET_PHYS_MEM, "PhysicalMemoryBlockBuffer");
}
for (Pair<Integer, DataType> pair : delayedAdds) {
replace(struct, pair.second, pair.first, pair.second.getDisplayName());
}
*/
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public void addToDataType(DataType dt, long offset) {
delayedAdds.add(new Pair<Integer, DataType>((int) offset, dt));
}
public int getSignature() {
return signature;
}
public void setSignature(int signature) {
this.signature = signature;
}
public int getValidDump() {
return validDump;
}
public void setValidDump(int validDump) {
this.validDump = validDump;
}
public int getMajorVersion() {
return majorVersion;
}
public void setMajorVersion(int majorVersion) {
this.majorVersion = majorVersion;
}
public int getMinorVersion() {
return minorVersion;
}
public void setMinorVersion(int minorVersion) {
this.minorVersion = minorVersion;
}
public long getDirectoryTableBase() {
return directoryTableBase;
}
public void setDirectoryTableBase(long directoryTableBase) {
this.directoryTableBase = directoryTableBase;
}
public long getPfnTableBase() {
return pfnTableBase;
}
public void setPfnTableBase(long pfnTableBase) {
this.pfnTableBase = pfnTableBase;
}
public long getPsLoadedModuleList() {
return psLoadedModuleList;
}
public void setPsLoadedModuleList(long psLoadedModuleList) {
this.psLoadedModuleList = psLoadedModuleList;
}
public long getPsActiveProcessHead() {
return psActiveProcessHead;
}
public void setPsActiveProcessHead(long psActiveProcessHead) {
this.psActiveProcessHead = psActiveProcessHead;
}
public int getMachineImageType() {
return machineImageType;
}
public void setMachineImageType(int machineImageType) {
this.machineImageType = machineImageType;
}
public int getNumberOfProcessors() {
return numberOfProcessors;
}
public void setNumberOfProcessors(int numberOfProcessors) {
this.numberOfProcessors = numberOfProcessors;
}
public PhysicalMemoryDescriptor getPhysicalMemoryBlockBuffer() {
return pmd;
}
public void setPhysicalMemoryBlockBuffer(PhysicalMemoryDescriptor pmd) {
this.pmd = pmd;
}
public int getDumpType() {
return dumpType;
}
public void setDumpType(int dumpType) {
this.dumpType = dumpType;
}
public long getRequiredDumpSpace() {
return requiredDumpSpace;
}
public void setRequiredDumpSpace(long requiredDumpSpace) {
this.requiredDumpSpace = requiredDumpSpace;
}
public long getSystemUpTime() {
return systemUpTime;
}
public void setSystemUpTime(long systemUpTime) {
this.systemUpTime = systemUpTime;
}
public long getSystemTime() {
return systemTime;
}
public void setSystemTime(long systemTime) {
this.systemTime = systemTime;
}
public int getBugCheckCode() {
return bugCheckCode;
}
public void setBugCheckCode(int bugCheckCode) {
this.bugCheckCode = bugCheckCode;
}
public long getBugCheckParameter1() {
return bugCheckParameter1;
}
public void setBugCheckParameter1(long bugCheckParameter1) {
this.bugCheckParameter1 = bugCheckParameter1;
}
public long getBugCheckParameter2() {
return bugCheckParameter2;
}
public void setBugCheckParameter2(long bugCheckParameter2) {
this.bugCheckParameter2 = bugCheckParameter2;
}
public long getBugCheckParameter3() {
return bugCheckParameter3;
}
public void setBugCheckParameter3(long bugCheckParameter3) {
this.bugCheckParameter3 = bugCheckParameter3;
}
public long getBugCheckParameter4() {
return bugCheckParameter4;
}
public void setBugCheckParameter4(long bugCheckParameter4) {
this.bugCheckParameter4 = bugCheckParameter4;
}
public long getKdDebuggerDataBlock() {
return kdDebuggerDataBlock;
}
public void setKdDebuggerDataBlock(long kdDebuggerDataBlock) {
this.kdDebuggerDataBlock = kdDebuggerDataBlock;
}
public int getMiniDumpFields() {
return miniDumpFields;
}
public void setMiniDumpFields(int miniDumpFields) {
this.miniDumpFields = miniDumpFields;
}
public int getSecondaryDataState() {
return secondaryDataState;
}
public void setSecondaryDataState(int secondaryDataState) {
this.secondaryDataState = secondaryDataState;
}
public int getProductType() {
return productType;
}
public void setProductType(int productType) {
this.productType = productType;
}
public int getSuiteMask() {
return suiteMask;
}
public void setSuiteMask(int suiteMask) {
this.suiteMask = suiteMask;
}
public int getWriterStatus() {
return writerStatus;
}
public void setWriterStatus(int writerStatus) {
this.writerStatus = writerStatus;
}
public int getKdSecondaryVersion() {
return kdSecondaryVersion;
}
public void setKdSecondaryVersion(int kdSecondaryVersion) {
this.kdSecondaryVersion = kdSecondaryVersion;
}
public int getAttributes() {
return attributes;
}
public void setAttributes(int attributes) {
this.attributes = attributes;
}
public int getBootId() {
return bootId;
}
public void setBootId(int bootId) {
this.bootId = bootId;
}
public boolean is32Bit() {
return is32Bit;
}
public int getPaeEnabled() {
return paeEnabled;
}
public void setPaeEnabled(int paeEnabled) {
this.paeEnabled = paeEnabled;
}
}

View File

@@ -0,0 +1,99 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class PhysicalMemoryDescriptor implements StructConverter {
public final static String NAME = "PAGEDUMP_PHYS_MEMORY_DESCRIPTOR";
private int numberOfRuns;
private long numberOfPages;
private PhysicalMemoryRun[] runs;
private DumpFileReader reader;
private long index;
PhysicalMemoryDescriptor(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
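// If the slot where NumberOfRuns would start still holds the "PAGE" fill signature,
// no physical memory descriptor was written and the run count is left at zero.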
private void parse() throws IOException {
reader.setPointerIndex(index);
int nruns = (int) reader.readNextPointer();
if (nruns == Pagedump.SIGNATURE) {
setNumberOfRuns(0);
return;
}
setNumberOfRuns(nruns);
setNumberOfPages(reader.readNextPointer());
runs = new PhysicalMemoryRun[numberOfRuns];
for (int i = 0; i < numberOfRuns; i++) {
setRuns(new PhysicalMemoryRun(reader, reader.getPointerIndex()), i);
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "NumberOfRuns", null);
struct.add(DWORD, 4, "NumberOfPages", null);
DataType t = runs[0].toDataType();
ArrayDataType a = new ArrayDataType(t, numberOfRuns, t.getLength());
struct.add(a, a.getLength(), "Runs", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public int getNumberOfRuns() {
return numberOfRuns;
}
public void setNumberOfRuns(int numberOfRuns) {
this.numberOfRuns = numberOfRuns;
}
public long getNumberOfPages() {
return numberOfPages;
}
public void setNumberOfPages(long numberOfPages) {
this.numberOfPages = numberOfPages;
}
public PhysicalMemoryRun[] getRuns() {
return runs;
}
public void setRuns(PhysicalMemoryRun run, int index) {
this.runs[index] = run;
}
}

View File

@@ -0,0 +1,79 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class PhysicalMemoryRun implements StructConverter {
public final static String NAME = "PAGEDUMP_PHYS_MEM_RUN";
private long basePage;
private long pageCount;
private DumpFileReader reader;
private long index;
PhysicalMemoryRun(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setBasePage(reader.readNextPointer());
setPageCount(reader.readNextPointer());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "BasePage", null);
struct.add(DWORD, 4, "PageCount", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public long getBasePage() {
return basePage;
}
public void setBasePage(long basePage) {
this.basePage = basePage;
}
public long getPageCount() {
return pageCount;
}
public void setPageCount(long pageCount) {
this.pageCount = pageCount;
}
}

View File

@@ -0,0 +1,153 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class SummaryHeader implements StructConverter {
public final static String NAME = "PAGEDUMP_SUMMARY";
public static final int SIGNATURE = 0x45474150; // "PAGE"
private int signature;
private int validDump;
private int dumpOptions;
private int headerSize;
private int bitmapSize;
private int pages;
private int sizeOfBitMap;
private int[] buffer;
private DumpFileReader reader;
private long index;
SummaryHeader(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
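// Summary dump header: the common PAGEDUMP fields are followed by an RTL_BITMAP
// that appears to flag which physical pages are actually present in the dump.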
private void parse() throws IOException {
reader.setPointerIndex(index);
setSignature(reader.readNextInt());
setValidDump(reader.readNextInt());
setDumpOptions(reader.readNextInt());
setHeaderSize(reader.readNextInt());
setBitmapSize(reader.readNextInt());
setSizeOfBitMap(reader.readNextInt());
reader.readNextLong();
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(STRING, 4, "Signature", null);
struct.add(STRING, 4, "ValidDump", null);
struct.add(DWORD, 4, "DumpOptions", null);
struct.add(DWORD, 4, "HeaderSize", null);
struct.add(DWORD, 4, "BitmapSize", null);
struct.add(DWORD, 4, "Pages", null);
StructureDataType s0 = new StructureDataType("RTL_BITMAP", 0);
s0.add(DWORD, 4, "SizeOfBitMap", null);
if (sizeOfBitMap > 0) {
s0.add(QWORD, 8, "", null);
ArrayDataType a = new ArrayDataType(BYTE, sizeOfBitMap, 1);
s0.add(a, a.getLength(), "Buffer", null);
}
struct.add(s0, s0.getLength(), "BitMap", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public int getSignature() {
return signature;
}
public void setSignature(int signature) {
this.signature = signature;
}
public int getValidDump() {
return validDump;
}
public void setValidDump(int validDump) {
this.validDump = validDump;
}
public int getDumpOptions() {
return dumpOptions;
}
public void setDumpOptions(int dumpOptions) {
this.dumpOptions = dumpOptions;
}
public int getHeaderSize() {
return headerSize;
}
public void setHeaderSize(int headerSize) {
this.headerSize = headerSize;
}
public int getBitmapSize() {
return bitmapSize;
}
public void setBitmapSize(int bitmapSize) {
this.bitmapSize = bitmapSize;
}
public int getPages() {
return pages;
}
public void setPages(int pages) {
this.pages = pages;
}
public int getSizeOfBitMap() {
return sizeOfBitMap;
}
public void setSizeOfBitMap(int sizeOfBitMap) {
this.sizeOfBitMap = sizeOfBitMap;
}
public int[] getBuffer() {
return buffer;
}
public void setBuffer(int[] buffer) {
this.buffer = buffer;
}
}

View File

@@ -0,0 +1,90 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
public class TriageDataBlock implements StructConverter {
public final static String NAME = "_TRIAGE_DATA_BLOCK";
private long address;
private int offset;
private int size;
private DumpFileReader reader;
private long index;
private int psz;
TriageDataBlock(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
this.psz = reader.getPointerSize();
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setAddress(reader.readNextPointer());
setOffset(reader.readNextInt());
setSize(reader.readNextInt());
}
@Override
public DataType toDataType() {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(POINTER, psz, "Address", null);
struct.add(DWORD, 4, "Offset", null);
struct.add(DWORD, 4, "Size", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public long getAddress() {
return address;
}
public void setAddress(long address) {
this.address = address;
}
public int getOffset() {
return offset;
}
public void setOffset(int offset) {
this.offset = offset;
}
public int getSize() {
return size;
}
public void setSize(int size) {
this.size = size;
}
}

View File

@@ -0,0 +1,382 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
public class TriageDump implements StructConverter {
public final static String NAME = "PAGEDUMP_TRIAGE";
private int servicePackBuild;
private int sizeOfDump;
private int validOffset;
private int contextOffset;
private int exceptionOffset;
private int mmOffset;
private int unloadedDriversOffset;
private int prcbOffset;
private int processOffset;
private int threadOffset;
private int callStackOffset;
private int callStackSize;
private int driverListOffset;
private int driverCount;
private int stringPoolOffset;
private int stringPoolSize;
private int brokenDriverOffset;
private int triageOptions;
private long topOfStack;
private int bStoreOffset;
private int bStoreSize;
private long bStoreLimit;
private long dataPageAddress;
private int dataPageOffset;
private int dataPageSize;
private int debuggerDataOffset;
private int debuggerDataSize;
private int dataBlocksOffset;
private int dataBlocksCount;
private DumpFileReader reader;
private long index;
private int psz;
private boolean is32Bit;
TriageDump(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
this.psz = reader.getPointerSize();
this.is32Bit = psz == 4;
parse();
}
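// The offsets recorded here are file offsets into the triage dump; Pagedump uses
// them to locate the call stack, driver lists, string pool, debugger data and data
// blocks when building its markup.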
private void parse() throws IOException {
reader.setPointerIndex(index);
setServicePackBuild(reader.readNextInt());
setSizeOfDump(reader.readNextInt());
setValidOffset(reader.readNextInt());
setContextOffset(reader.readNextInt());
setExceptionOffset(reader.readNextInt());
setMmOffset(reader.readNextInt());
setUnloadedDriversOffset(reader.readNextInt());
setPrcbOffset(reader.readNextInt());
setProcessOffset(reader.readNextInt());
setThreadOffset(reader.readNextInt());
setCallStackOffset(reader.readNextInt());
setCallStackSize(reader.readNextInt());
setDriverListOffset(reader.readNextInt());
setDriverCount(reader.readNextInt());
setStringPoolOffset(reader.readNextInt());
setStringPoolSize(reader.readNextInt());
setBrokenDriverOffset(reader.readNextInt());
setTriageOptions(reader.readNextInt());
setTopOfStack(reader.readNextPointer());
setBStoreOffset(reader.readNextInt());
setBStoreSize(reader.readNextInt());
setBStoreLimit(reader.readNextPointer());
if (!is32Bit) {
setDataPageAddress(reader.readNextLong());
setDataPageOffset(reader.readNextInt());
setDataPageSize(reader.readNextInt());
}
setDebuggerDataOffset(reader.readNextInt());
setDebuggerDataSize(reader.readNextInt());
setDataBlocksOffset(reader.readNextInt());
setDataBlocksCount(reader.readNextInt());
}
@Override
public DataType toDataType() {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "ServicePackBuild", null);
struct.add(DWORD, 4, "SizeOfDump", null);
struct.add(DWORD, 4, "ValidOffset", null);
struct.add(Pointer32DataType.dataType, 4, "ContextOffset", null);
struct.add(Pointer32DataType.dataType, 4, "ExceptionOffset", null);
struct.add(Pointer32DataType.dataType, 4, "MmOffset", null);
struct.add(Pointer32DataType.dataType, 4, "UnloadedDriversOffset", null);
struct.add(Pointer32DataType.dataType, 4, "PrcbOffset", null);
struct.add(Pointer32DataType.dataType, 4, "ProcessOffset", null);
struct.add(Pointer32DataType.dataType, 4, "ThreadOffset", null);
struct.add(Pointer32DataType.dataType, 4, "CallStackOffset", null);
struct.add(DWORD, 4, "CallStackSize", null);
struct.add(Pointer32DataType.dataType, 4, "DriverListOffset", null);
struct.add(DWORD, 4, "DriverCount", null);
struct.add(Pointer32DataType.dataType, 4, "StringPoolOffset", null);
struct.add(DWORD, 4, "StringPoolSize", null);
struct.add(Pointer32DataType.dataType, 4, "BrokenDriverOffset", null);
struct.add(DWORD, 4, "TriageOptions", null);
struct.add(POINTER, psz, "TopOfStack", null);
struct.add(Pointer32DataType.dataType, 4, "BStoreOffset", null);
struct.add(DWORD, 4, "BStoreSize", null);
struct.add(POINTER, psz, "BStoreLimit", null);
if (!is32Bit) {
struct.add(POINTER, psz, "DataPageAddress", null);
struct.add(Pointer32DataType.dataType, 4, "DataPageOffset", null);
struct.add(DWORD, 4, "DataPageSize", null);
}
struct.add(Pointer32DataType.dataType, 4, "DebuggerDataOffset", null);
struct.add(DWORD, 4, "DebuggerDataSize", null);
struct.add(Pointer32DataType.dataType, 4, "DataBlocksOffset", null);
struct.add(DWORD, 4, "DataBlocksCount", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public int getServicePackBuild() {
return servicePackBuild;
}
public void setServicePackBuild(int servicePackBuild) {
this.servicePackBuild = servicePackBuild;
}
public int getSizeOfDump() {
return sizeOfDump;
}
public void setSizeOfDump(int sizeOfDump) {
this.sizeOfDump = sizeOfDump;
}
public int getValidOffset() {
return validOffset;
}
public void setValidOffset(int validOffset) {
this.validOffset = validOffset;
}
public int getContextOffset() {
return contextOffset;
}
public void setContextOffset(int contextOffset) {
this.contextOffset = contextOffset;
}
public int getExceptionOffset() {
return exceptionOffset;
}
public void setExceptionOffset(int exceptionOffset) {
this.exceptionOffset = exceptionOffset;
}
public int getMmOffset() {
return mmOffset;
}
public void setMmOffset(int mmOffset) {
this.mmOffset = mmOffset;
}
public int getUnloadedDriversOffset() {
return unloadedDriversOffset;
}
public void setUnloadedDriversOffset(int unloadedDriversOffset) {
this.unloadedDriversOffset = unloadedDriversOffset;
}
public int getPrcbOffset() {
return prcbOffset;
}
public void setPrcbOffset(int prcbOffset) {
this.prcbOffset = prcbOffset;
}
public int getProcessOffset() {
return processOffset;
}
public void setProcessOffset(int processOffset) {
this.processOffset = processOffset;
}
public int getThreadOffset() {
return threadOffset;
}
public void setThreadOffset(int threadOffset) {
this.threadOffset = threadOffset;
}
public int getCallStackOffset() {
return callStackOffset;
}
public void setCallStackOffset(int callStackOffset) {
this.callStackOffset = callStackOffset;
}
public int getCallStackSize() {
return callStackSize;
}
public void setCallStackSize(int callStackSize) {
this.callStackSize = callStackSize;
}
public int getDriverListOffset() {
return driverListOffset;
}
public void setDriverListOffset(int driverListOffset) {
this.driverListOffset = driverListOffset;
}
public int getDriverCount() {
return driverCount;
}
public void setDriverCount(int driverCount) {
this.driverCount = driverCount;
}
public int getStringPoolOffset() {
return stringPoolOffset;
}
public void setStringPoolOffset(int stringPoolOffset) {
this.stringPoolOffset = stringPoolOffset;
}
public int getStringPoolSize() {
return stringPoolSize;
}
public void setStringPoolSize(int stringPoolSize) {
this.stringPoolSize = stringPoolSize;
}
public int getBrokenDriverOffset() {
return brokenDriverOffset;
}
public void setBrokenDriverOffset(int brokenDriverOffset) {
this.brokenDriverOffset = brokenDriverOffset;
}
public int getTriageOptions() {
return triageOptions;
}
public void setTriageOptions(int triageOptions) {
this.triageOptions = triageOptions;
}
public long getTopOfStack() {
return topOfStack;
}
public void setTopOfStack(long topOfStack) {
this.topOfStack = topOfStack;
}
public int getDebuggerDataOffset() {
return debuggerDataOffset;
}
public void setDebuggerDataOffset(int debuggerDataOffset) {
this.debuggerDataOffset = debuggerDataOffset;
}
public int getDebuggerDataSize() {
return debuggerDataSize;
}
public void setDebuggerDataSize(int debuggerDataSize) {
this.debuggerDataSize = debuggerDataSize;
}
public int getDataBlocksOffset() {
return dataBlocksOffset;
}
public void setDataBlocksOffset(int dataBlocksOffset) {
this.dataBlocksOffset = dataBlocksOffset;
}
public int getDataBlocksCount() {
return dataBlocksCount;
}
public void setDataBlocksCount(int dataBlocksCount) {
this.dataBlocksCount = dataBlocksCount;
}
public int getBStoreOffset() {
return bStoreOffset;
}
public void setBStoreOffset(int bStoreOffset) {
this.bStoreOffset = bStoreOffset;
}
public int getBStoreSize() {
return bStoreSize;
}
public void setBStoreSize(int bStoreSize) {
this.bStoreSize = bStoreSize;
}
public long getBStoreLimit() {
return bStoreLimit;
}
public void setBStoreLimit(long bStoreLimit) {
this.bStoreLimit = bStoreLimit;
}
public long getDataPageAddress() {
return dataPageAddress;
}
public void setDataPageAddress(long dataPageAddress) {
this.dataPageAddress = dataPageAddress;
}
public int getDataPageOffset() {
return dataPageOffset;
}
public void setDataPageOffset(int dataPageOffset) {
this.dataPageOffset = dataPageOffset;
}
public int getDataPageSize() {
return dataPageSize;
}
public void setDataPageSize(int dataPageSize) {
this.dataPageSize = dataPageSize;
}
}

View File

@@ -0,0 +1,203 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
public class TriageStorage implements StructConverter {
public final static String NAME = "_MI_TRIAGE_STORAGE";
private int version;
private int size;
private int mmSpecialPoolTag;
private int miTriageActionTaken;
private int mmVerifyDriverLevel;
private int kernelVerifier;
private long mmMaximumNonPagedPool;
private long mmAllocatedNonPagedPool;
private long pagedPoolMaximum;
private long pagePoolAllocated;
private long commitedPages;
private long commitedPagesPeak;
private long commitedPagesMaximum;
private DumpFileReader reader;
private long index;
private int psz;
TriageStorage(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
this.psz = reader.getPointerSize();
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setVersion(reader.readNextInt());
setSize(reader.readNextInt());
setMmSpecialPoolTag(reader.readNextInt());
setMiTriageActionTaken(reader.readNextInt());
setMmVerifyDriverLevel(reader.readNextInt());
setKernelVerifier(reader.readNextInt());
setMmMaximumNonPagedPool(reader.readNextLong());
setMmAllocatedNonPagedPool(reader.readNextLong());
setPagedPoolMaximum(reader.readNextLong());
setPagePoolAllocated(reader.readNextLong());
setCommitedPages(reader.readNextLong());
setCommitedPagesPeak(reader.readNextLong());
setCommitedPagesMaximum(reader.readNextLong());
}
@Override
public DataType toDataType() {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "Version", null);
struct.add(DWORD, 4, "Size", null);
struct.add(DWORD, 4, "MmSpecialPoolTag", null);
struct.add(DWORD, 4, "MiTriageActionTaken", null);
struct.add(DWORD, 4, "MmVerifyDriverLevel", null);
struct.add(DWORD, 4, "KernelVerifier", null);
struct.add(QWORD, psz, "MmMaximumNonPagedPool", null);
struct.add(QWORD, psz, "MmAllocatedNonPagedPool", null);
struct.add(QWORD, psz, "PagedPoolMaximum", null);
struct.add(QWORD, psz, "PagePoolAllocated", null);
struct.add(QWORD, psz, "CommitedPages", null);
struct.add(QWORD, psz, "CommitedPagesPeak", null);
struct.add(QWORD, psz, "CommitedPagesMaximum", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
public int getSize() {
return size;
}
public void setSize(int size) {
this.size = size;
}
public int getMmSpecialPoolTag() {
return mmSpecialPoolTag;
}
public void setMmSpecialPoolTag(int mmSpecialPoolTag) {
this.mmSpecialPoolTag = mmSpecialPoolTag;
}
public int getMiTriageActionTaken() {
return miTriageActionTaken;
}
public void setMiTriageActionTaken(int miTriageActionTaken) {
this.miTriageActionTaken = miTriageActionTaken;
}
public int getMmVerifyDriverLevel() {
return mmVerifyDriverLevel;
}
public void setMmVerifyDriverLevel(int mmVerifyDriverLevel) {
this.mmVerifyDriverLevel = mmVerifyDriverLevel;
}
public long getMmMaximumNonPagedPool() {
return mmMaximumNonPagedPool;
}
public void setMmMaximumNonPagedPool(long mmMaximumNonPagedPool) {
this.mmMaximumNonPagedPool = mmMaximumNonPagedPool;
}
public int getKernelVerifier() {
return kernelVerifier;
}
public void setKernelVerifier(int kernelVerifier) {
this.kernelVerifier = kernelVerifier;
}
public long getMmAllocatedNonPagedPool() {
return mmAllocatedNonPagedPool;
}
public void setMmAllocatedNonPagedPool(long mmAllocatedNonPagedPool) {
this.mmAllocatedNonPagedPool = mmAllocatedNonPagedPool;
}
public long getPagedPoolMaximum() {
return pagedPoolMaximum;
}
public void setPagedPoolMaximum(long pagedPoolMaximum) {
this.pagedPoolMaximum = pagedPoolMaximum;
}
public long getPagePoolAllocated() {
return pagePoolAllocated;
}
public void setPagePoolAllocated(long pagePoolAllocated) {
this.pagePoolAllocated = pagePoolAllocated;
}
public long getCommitedPages() {
return commitedPages;
}
public void setCommitedPages(long commitedPages) {
this.commitedPages = commitedPages;
}
public long getCommitedPagesPeak() {
return commitedPagesPeak;
}
public void setCommitedPagesPeak(long commitedPagesPeak) {
this.commitedPagesPeak = commitedPagesPeak;
}
public long getCommitedPagesMaximum() {
return commitedPagesMaximum;
}
public void setCommitedPagesMaximum(long commitedPagesMaximum) {
this.commitedPagesMaximum = commitedPagesMaximum;
}
}

View File

@@ -0,0 +1,111 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.pagedump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
public class UnloadedDriver implements StructConverter {
public final static String NAME = "_DUMP_UNLOADED_DRIVERS";
private int nameLength;
private String name;
private long startAddress;
private long endAddress;
private DumpFileReader reader;
private long index;
private int psz;
UnloadedDriver(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
this.psz = reader.getPointerSize();
parse();
}
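// Each unloaded-driver record appears to hold a counted name in a fixed 24-byte
// (12 character) UTF-16 buffer followed by the image's start and end addresses;
// getSize() derives the image size from that address range.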
private void parse() throws IOException {
reader.setPointerIndex(index);
setNameLength(reader.readNextShort());
reader.readNextShort();
reader.readNextInt();
reader.readNextPointer();
setName(reader.readNextUnicodeString(12));
setStartAddress(reader.readNextPointer());
setEndAddress(reader.readNextPointer());
}
@Override
public DataType toDataType() {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(WORD, 2, "NameLength", null);
struct.add(WORD, 2, "", null);
struct.add(DWORD, 4, "", null);
struct.add(POINTER, psz, "", null);
struct.add(UTF16, 24, "Name", null);
struct.add(POINTER, psz, "StartAddress", null);
struct.add(POINTER, psz, "EndAddress", null);
struct.setCategoryPath(new CategoryPath("/PDMP"));
return struct;
}
public int getNameLength() {
return nameLength;
}
public void setNameLength(int nameLength) {
this.nameLength = nameLength;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public long getStartAddress() {
return startAddress;
}
public void setStartAddress(long startAddress) {
this.startAddress = startAddress;
}
public long getEndAddress() {
return endAddress;
}
public void setEndAddress(long endAddress) {
this.endAddress = endAddress;
}
public long getSize() {
long len = endAddress - startAddress;
return (len < 0) ? -len : len;
}
}

View File

@@ -0,0 +1,176 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.userdump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class DebugInfo implements StructConverter {
public final static String NAME = "DEBUG_EVENT";
private int eventCode;
private int processId;
private int threadId;
private int exceptionCode;
private int exceptionFlags;
private long exceptionRecord;
private long exceptionAddress;
private int numberOfParameters;
private long parameters[] = new long[16];
private long index;
private int psz;
DebugInfo(DumpFileReader reader, long index) throws IOException {
this.index = index;
this.psz = reader.getPointerSize();
parse(reader);
}
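// Mirrors a user-mode DEBUG_EVENT with an embedded exception record: event, process
// and thread ids followed by the exception code, flags, record pointer and address,
// and up to 16 exception parameters.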
private void parse(DumpFileReader reader) throws IOException {
reader.setPointerIndex(index);
setEventCode(reader.readNextInt());
setProcessId(reader.readNextInt());
setThreadId(reader.readNextInt());
reader.readNextInt();
setExceptionCode(reader.readNextInt());
setExceptionFlags(reader.readNextInt());
setExceptionRecord(reader.readNextPointer());
setExceptionAddress(reader.readNextPointer());
numberOfParameters = reader.readNextInt();
for (int i = 0; i < numberOfParameters; i++) {
parameters[i] = reader.readNextPointer();
}
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(StructConverter.DWORD, 4, "EventCode", null);
struct.add(StructConverter.DWORD, 4, "ProcessId", null);
struct.add(StructConverter.DWORD, 4, "ThreadId", null);
struct.add(StructConverter.DWORD, 4, "_alignment", null);
struct.add(StructConverter.DWORD, 4, "ExceptionCode", null);
struct.add(StructConverter.DWORD, 4, "ExceptionFlags", null);
struct.add(StructConverter.POINTER, psz, "pExceptionRecord", null);
struct.add(StructConverter.POINTER, psz, "ExceptionAddress", null);
struct.add(StructConverter.DWORD, 4, "NumberOfParameters", null);
for (int i = 0; i < numberOfParameters; i++) {
struct.add(StructConverter.POINTER, psz, "Param_" + i, null);
}
struct.setCategoryPath(new CategoryPath("/UDMP"));
return struct;
}
/**
* @return the processId
*/
public int getProcessId() {
return processId;
}
/**
* @param processId the processId to set
*/
public void setProcessId(int processId) {
this.processId = processId;
}
/**
* @return the threadId
*/
public int getThreadId() {
return threadId;
}
/**
* @param threadId the threadId to set
*/
public void setThreadId(int threadId) {
this.threadId = threadId;
}
/**
* @return the exceptionCode
*/
public int getExceptionCode() {
return exceptionCode;
}
/**
* @param exceptionCode the exceptionCode to set
*/
public void setExceptionCode(int exceptionCode) {
this.exceptionCode = exceptionCode;
}
/**
* @return the exceptionAddress
*/
public long getExceptionAddress() {
return exceptionAddress;
}
/**
* @param exceptionAddress the exceptionAddress to set
*/
public void setExceptionAddress(long exceptionAddress) {
this.exceptionAddress = exceptionAddress;
}
/**
* @return the eventCode
*/
public int getEventCode() {
return eventCode;
}
/**
* @param eventCode the eventCode to set
*/
public void setEventCode(int eventCode) {
this.eventCode = eventCode;
}
public int getExceptionFlags() {
return exceptionFlags;
}
public void setExceptionFlags(int exceptionFlags) {
this.exceptionFlags = exceptionFlags;
}
public long getExceptionRecord() {
return exceptionRecord;
}
public void setExceptionRecord(long exceptionRecord) {
this.exceptionRecord = exceptionRecord;
}
}

View File

@@ -0,0 +1,161 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.userdump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MemoryInfo implements StructConverter {
public final static String NAME = "MEMORY_INFO";
private long baseAddress;
private long allocationBase;
private int allocationProtect;
private long regionSize;
private int state;
private int protect;
private int type;
private DumpFileReader reader;
private long index;
MemoryInfo(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setBaseAddress(reader.readNextPointer());
setAllocationBase(reader.readNextPointer());
setAllocationProtect(reader.readNextInt());
reader.readNextInt();
setRegionSize(reader.readNextPointer());
setState(reader.readNextInt());
setProtect(reader.readNextInt());
setType(reader.readNextInt());
reader.readNextInt();
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(POINTER, 8, "BaseAddress", null);
struct.add(POINTER, 8, "AllocationBase", null);
struct.add(DWORD, 4, "AllocationProtect", null);
struct.add(DWORD, 4, "__alignment1", null);
struct.add(POINTER, 8, "RegionSize", null);
struct.add(DWORD, 4, "State", null);
struct.add(DWORD, 4, "Protect", null);
struct.add(DWORD, 4, "Type", null);
struct.add(DWORD, 4, "__alignment2", null);
struct.setCategoryPath(new CategoryPath("/UDMP"));
return struct;
}
public long getBaseAddress() {
return baseAddress;
}
public void setBaseAddress(long baseAddress) {
this.baseAddress = baseAddress;
}
public long getAllocationBase() {
return allocationBase;
}
public void setAllocationBase(long allocationBase) {
this.allocationBase = allocationBase;
}
public int getAllocationProtect() {
return allocationProtect;
}
public void setAllocationProtect(int allocationProtect) {
this.allocationProtect = allocationProtect;
}
public long getRegionSize() {
return regionSize;
}
public void setRegionSize(long regionSize) {
this.regionSize = regionSize;
}
public int getState() {
return state;
}
public void setState(int state) {
this.state = state;
}
public int getProtect() {
return protect;
}
public void setProtect(int protect) {
this.protect = protect;
}
public int getType() {
return type;
}
public void setType(int type) {
this.type = type;
}
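// Decodes the standard VirtualQuery State/Type flags (MEM_COMMIT 0x1000,
// MEM_RESERVE 0x2000, MEM_FREE 0x10000, MEM_IMAGE 0x1000000, MEM_MAPPED 0x40000,
// MEM_PRIVATE 0x20000) into a human-readable comment string.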
	public String getComment() {
		String comment = "";
		if ((state & 0x1000) > 0) { // MEM_COMMIT
			comment += "COMMIT ";
		}
		if ((state & 0x10000) > 0) { // MEM_FREE
			comment += "FREE ";
		}
		if ((state & 0x2000) > 0) { // MEM_RESERVE
			comment += "RESERVE ";
		}
		if ((type & 0x1000000) > 0) { // MEM_IMAGE
			comment += "IMAGE ";
		}
		if ((type & 0x40000) > 0) { // MEM_MAPPED
			comment += "MAPPED ";
		}
		if ((type & 0x20000) > 0) { // MEM_PRIVATE
			comment += "PRIVATE ";
		}
		return comment;
	}
}
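
A minimal decoding sketch, not part of the commit: the masks tested in getComment() above are the standard Windows MEM_* allocation constants, so the same logic can be written with the constants named. The sketch class and method names are illustrative only; it assumes a MemoryInfo instance already parsed from a dump.

package ghidra.file.formats.dump.userdump;

// Illustrative only; mirrors MemoryInfo.getComment() with the flag values named.
public class MemoryInfoFlagsSketch {
	private static final int MEM_COMMIT = 0x1000;    // state flags
	private static final int MEM_RESERVE = 0x2000;
	private static final int MEM_FREE = 0x10000;
	private static final int MEM_PRIVATE = 0x20000;  // type flags
	private static final int MEM_MAPPED = 0x40000;
	private static final int MEM_IMAGE = 0x1000000;

	static String describe(MemoryInfo info) {
		StringBuilder sb = new StringBuilder();
		if ((info.getState() & MEM_COMMIT) != 0) {
			sb.append("COMMIT ");
		}
		if ((info.getState() & MEM_FREE) != 0) {
			sb.append("FREE ");
		}
		if ((info.getState() & MEM_RESERVE) != 0) {
			sb.append("RESERVE ");
		}
		if ((info.getType() & MEM_IMAGE) != 0) {
			sb.append("IMAGE ");
		}
		if ((info.getType() & MEM_MAPPED) != 0) {
			sb.append("MAPPED ");
		}
		if ((info.getType() & MEM_PRIVATE) != 0) {
			sb.append("PRIVATE ");
		}
		return sb.toString().trim();
	}
}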

View File

@ -0,0 +1,90 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.userdump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class MemoryRange implements StructConverter {
public final static String NAME = "MINIDUMP_MEMORY_RANGE";
private long startOfMemoryRange;
private int dataSize;
private int RVA;
private DumpFileReader reader;
private long index;
MemoryRange(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setStartOfMemoryRange(reader.readNextLong());
setDataSize(reader.readNextInt());
setRVA(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(QWORD, 8, "StartOfMemoryRange", null);
struct.add(DWORD, 4, "DataSize", null);
struct.add(Pointer32DataType.dataType, 4, "RVA", null);
struct.setCategoryPath(new CategoryPath("/UDMP"));
return struct;
}
public long getStartOfMemoryRange() {
return startOfMemoryRange;
}
public void setStartOfMemoryRange(long startOfMemoryRange) {
this.startOfMemoryRange = startOfMemoryRange;
}
public int getDataSize() {
return dataSize;
}
public void setDataSize(int dataSize) {
this.dataSize = dataSize;
}
public int getRVA() {
return RVA;
}
public void setRVA(int rva) {
RVA = rva;
}
}
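
A short usage sketch, not part of the commit: a MINIDUMP_MEMORY_RANGE record maps target memory starting at StartOfMemoryRange to the file bytes [RVA, RVA + DataSize). The class and method names are illustrative, and the DumpFileReader is assumed to be already opened over the dump.

package ghidra.file.formats.dump.userdump;

import java.io.IOException;

import ghidra.file.formats.dump.DumpFileReader;

// Illustrative only: reports where a range's backing bytes live in the dump file.
public class MemoryRangeSketch {
	static void describe(DumpFileReader reader, long recordOffset) throws IOException {
		MemoryRange range = new MemoryRange(reader, recordOffset);
		long fileStart = Integer.toUnsignedLong(range.getRVA());
		long fileEnd = fileStart + Integer.toUnsignedLong(range.getDataSize());
		System.out.printf("VA %x -> file bytes [%x, %x)%n",
			range.getStartOfMemoryRange(), fileStart, fileEnd);
	}
}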

View File

@ -0,0 +1,127 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.userdump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class Module implements StructConverter {
public final static String NAME = "MODULE_";
private long moduleBase;
private int moduleSize;
private int moduleNameLength;
private String moduleName;
private DumpFileReader reader;
private long index;
Module(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setModuleBase(reader.readNextPointer());
setModuleSize(reader.readNextInt());
setModuleNameLength(reader.readNextInt());
setModuleName(reader.readNextAsciiString(getModuleNameLength()));
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME + Long.toHexString(moduleBase), 0);
struct.add(StructConverter.POINTER, reader.getPointerSize(), "ModuleBase", null);
struct.add(StructConverter.DWORD, 4, "ModuleSize", null);
struct.add(StructConverter.DWORD, 4, "ModuleInfoLength", null);
if (getModuleNameLength() > 0) {
struct.add(new StringDataType(), getModuleNameLength(), "ModuleInfo", null);
}
struct.setCategoryPath(new CategoryPath("/UDMP"));
return struct;
}
/**
* @return the moduleBase
*/
public long getModuleBase() {
return moduleBase;
}
/**
* @param moduleBase the moduleBase to set
*/
public void setModuleBase(long moduleBase) {
this.moduleBase = moduleBase;
}
/**
* @return the moduleSize
*/
public int getModuleSize() {
return moduleSize;
}
/**
* @param moduleSize the moduleSize to set
*/
public void setModuleSize(int moduleSize) {
this.moduleSize = moduleSize;
}
/**
* @return the moduleName
*/
public String getModuleName() {
return moduleName;
}
/**
* @param moduleName the moduleName to set
*/
public void setModuleName(String moduleName) {
this.moduleName = moduleName;
}
/**
* @return the moduleNameLength
*/
public int getModuleNameLength() {
return moduleNameLength;
}
/**
* @param moduleNameLength the moduleNameLength to set
*/
public void setModuleNameLength(int moduleNameLength) {
this.moduleNameLength = moduleNameLength;
}
}
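
A walking sketch, not part of the commit: module records are variable-length because the name is inlined, so iteration advances by each record's own structure length, just as Userdump.buildStructures() does below. The sketch class and method names are illustrative; the reader and header are assumed to come from an already-opened dump.

package ghidra.file.formats.dump.userdump;

import java.io.IOException;

import ghidra.file.formats.dump.DumpFileReader;
import ghidra.util.exception.DuplicateNameException;

// Illustrative only: lists the modules recorded in a userdump.
public class ModuleWalkSketch {
	static void listModules(DumpFileReader reader, UserdumpFileHeader header)
			throws IOException, DuplicateNameException {
		long offset = header.getModuleOffset();
		for (int i = 0; i < header.getModuleCount(); i++) {
			Module m = new Module(reader, offset);
			System.out.printf("%s base=%x size=%x%n",
				m.getModuleName(), m.getModuleBase(), m.getModuleSize());
			offset += m.toDataType().getLength(); // variable-length records
		}
	}
}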

View File

@ -0,0 +1,186 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.userdump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class Thread implements StructConverter {
public final static String NAME = "THREAD";
private int threadId;
private int suspendCount;
private int priorityClass;
private int platformId;
private int priority;
private long teb;
private long stackStartOfMemoryRange;
private int stackDataSize;
private int stackRVA;
private int contextDataSize;
private int contextRVA;
private DumpFileReader reader;
private long index;
public Thread(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
parse();
}
private void parse() throws IOException {
reader.setPointerIndex(index);
setThreadId(reader.readNextInt());
setSuspendCount(reader.readNextInt());
setPriorityClass(reader.readNextInt());
setPriority(reader.readNextInt());
setTeb(reader.readNextLong());
setStackStartOfMemoryRange(reader.readNextLong());
setStackDataSize(reader.readNextInt());
setStackRVA(reader.readNextInt());
setContextDataSize(reader.readNextInt());
setContextRVA(reader.readNextInt());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
StructureDataType struct = new StructureDataType(NAME, 0);
struct.add(DWORD, 4, "ThreadId", null);
struct.add(DWORD, 4, "SuspendCount", null);
struct.add(DWORD, 4, "PriorityClass", null);
struct.add(DWORD, 4, "Priority", null);
struct.add(QWORD, 8, "Teb", null);
StructureDataType s0 = new StructureDataType("Stack", 0);
s0.add(QWORD, 8, "StartOfMemoryRange", null);
s0.add(DWORD, 4, "DataSize", null);
s0.add(Pointer32DataType.dataType, 4, "RVA", null);
StructureDataType s1 = new StructureDataType("Context", 0);
s1.add(DWORD, 4, "DataSize", null);
s1.add(Pointer32DataType.dataType, 4, "RVA", null);
struct.add(s0, s0.getLength(), s0.getDisplayName(), null);
struct.add(s1, s1.getLength(), s1.getDisplayName(), null);
		struct.add(new ArrayDataType(QWORD, 4, 8), 0x20, "filler", null); // 4 QWORDs pad the record out to 0x50 bytes
struct.setCategoryPath(new CategoryPath("/UDMP"));
return struct;
}
public int getThreadId() {
return threadId;
}
public void setThreadId(int threadId) {
this.threadId = threadId;
}
public int getSuspendCount() {
return suspendCount;
}
public void setSuspendCount(int suspendCount) {
this.suspendCount = suspendCount;
}
public int getPriorityClass() {
return priorityClass;
}
public void setPriorityClass(int priorityClass) {
this.priorityClass = priorityClass;
}
public int getPlatformId() {
return platformId;
}
public void setPlatformId(int platformId) {
this.platformId = platformId;
}
public int getPriority() {
return priority;
}
public void setPriority(int priority) {
this.priority = priority;
}
public long getTeb() {
return teb;
}
public void setTeb(long teb) {
this.teb = teb;
}
public long getStackStartOfMemoryRange() {
return stackStartOfMemoryRange;
}
public void setStackStartOfMemoryRange(long stackStartOfMemoryRange) {
this.stackStartOfMemoryRange = stackStartOfMemoryRange;
}
public int getStackDataSize() {
return stackDataSize;
}
public void setStackDataSize(int stackDataSize) {
this.stackDataSize = stackDataSize;
}
public int getStackRVA() {
return stackRVA;
}
public void setStackRVA(int stackRVA) {
this.stackRVA = stackRVA;
}
public int getContextDataSize() {
return contextDataSize;
}
public void setContextDataSize(int contextDataSize) {
this.contextDataSize = contextDataSize;
}
public int getContextRVA() {
return contextRVA;
}
public void setContextRVA(int contextRVA) {
this.contextRVA = contextRVA;
}
}
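
A walking sketch, not part of the commit: THREAD records start at ThreadStateOffset and, with the trailing filler, each occupies 0x50 bytes, which is the element size Userdump.buildStructures() uses for the THREAD_INFO array. The sketch class and method names are illustrative.

package ghidra.file.formats.dump.userdump;

import java.io.IOException;

import ghidra.file.formats.dump.DumpFileReader;

// Illustrative only: lists the per-thread records in a userdump.
public class ThreadWalkSketch {
	static void listThreads(DumpFileReader reader, UserdumpFileHeader header)
			throws IOException {
		long offset = header.getThreadStateOffset();
		for (int i = 0; i < header.getThreadCount(); i++) {
			Thread t = new Thread(reader, offset);
			System.out.printf("tid=%x teb=%x stack VA=%x (%x bytes at file offset %x)%n",
				t.getThreadId(), t.getTeb(), t.getStackStartOfMemoryRange(),
				t.getStackDataSize(), t.getStackRVA());
			offset += 0x50; // fixed record stride (see toDataType above)
		}
	}
}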

View File

@ -0,0 +1,174 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.userdump;
import java.io.IOException;
import java.util.List;
import ghidra.app.util.Option;
import ghidra.app.util.OptionUtils;
import ghidra.app.util.opinion.PeLoader;
import ghidra.file.formats.dump.*;
import ghidra.file.formats.dump.cmd.ModuleToPeHelper;
import ghidra.framework.options.Options;
import ghidra.program.model.data.*;
import ghidra.program.model.listing.Program;
import ghidra.util.Msg;
import ghidra.util.task.TaskMonitor;
public class Userdump extends DumpFile {
public static final int SIGNATURE = 0x52455355; // "USER"
UserdumpFileHeader header;
private boolean createBlocks;
public Userdump(DumpFileReader reader, ProgramBasedDataTypeManager dtm, List<Option> options,
TaskMonitor monitor) {
super(reader, dtm, options, monitor);
Options props = program.getOptions(Program.PROGRAM_INFO);
props.setString("Executable Format", PeLoader.PE_NAME);
initManagerList(null);
createBlocks =
OptionUtils.getBooleanOptionValue(DumpFileLoader.CREATE_MEMORY_BLOCKS_OPTION_NAME,
options, DumpFileLoader.CREATE_MEMORY_BLOCKS_OPTION_DEFAULT);
try {
header = new UserdumpFileHeader(reader, 0L);
data.add(new DumpData(0, header.toDataType()));
buildStructures();
}
catch (Exception e) {
Msg.error(this, e.getMessage());
}
}
public UserdumpFileHeader getFileHeader() {
return header;
}
private void buildStructures() throws Exception {
DataType dt = header.toDataType();
data.add(new DumpData(0, "DumpHeader", dt.getLength()));
int regionOffset = (int) header.getMemoryRegionOffset();
addInteriorAddressObject("DumpHeader", 0, 0L, regionOffset);
int blocksLength = (int) (reader.length() - regionOffset);
addInteriorAddressObject("RawBlocks", regionOffset,
header.getMemoryRegionOffset(), blocksLength);
CategoryPath path = new CategoryPath("/winnt.h");
long offset = header.getThreadOffset();
DataType ctxt = getTypeFromArchive(path, "CONTEXT");
if (ctxt != null) {
ArrayDataType actxt =
new ArrayDataType(ctxt, header.getThreadCount(), ctxt.getLength());
data.add(new DumpData(offset, actxt, "THREAD_CONTEXTS", false, true));
for (int t = 0; t < header.getThreadCount(); t++) {
setProgramContext(offset + t * ctxt.getLength(), ctxt, Integer.toHexString(t));
}
}
offset = header.getThreadStateOffset();
long start = offset;
reader.setPointerIndex(offset);
for (int i = 0; i < header.getThreadCount(); i++) {
Thread t = new Thread(reader, offset);
String tid = Integer.toHexString(t.getThreadId());
dt = t.toDataType();
long stackOffset = t.getStackRVA();
if (createBlocks && stackOffset != 0) {
addInteriorAddressObject("ThreadStack_" + tid, (int) stackOffset,
t.getStackStartOfMemoryRange(), t.getStackDataSize());
}
offset += dt.getLength();
}
		ArrayDataType athreads = new ArrayDataType(dt, header.getThreadCount(), 0x50); // 0x50 = padded THREAD record size
data.add(new DumpData(start, athreads, "THREAD_INFO", false, true));
offset = header.getModuleOffset();
start = offset;
StructureDataType modulesDT = new StructureDataType("MODULE_INFO", 0);
for (int i = 0; i < header.getModuleCount(); i++) {
Module m = new Module(reader, offset);
dt = m.toDataType();
modulesDT.add(dt, dt.getLength(), m.getModuleName(), null);
addModule(m.getModuleName(), m.getModuleBase(), i, m.getModuleSize());
addExteriorAddressObject(m.getModuleName(), 0, m.getModuleBase(), m.getModuleSize());
offset += dt.getLength();
}
data.add(new DumpData(start, modulesDT, "MODULE_INFO", false, false));
long rva = header.getMemoryRegionOffset();
offset = header.getMemoryDescriptorOffset();
start = offset;
StructureDataType blocks = new StructureDataType("MEMORY_BLOCKS", 0);
for (int i = 0; i < header.getMemoryRegionCount(); i++) {
MemoryInfo minfo = new MemoryInfo(reader, offset);
dt = minfo.toDataType();
long regionSize = minfo.getRegionSize();
if (createBlocks) {
addInteriorAddressObject("Memory", (int) rva, minfo.getBaseAddress(), regionSize);
}
//ArrayDataType block =
// new ArrayDataType(ByteDataType.dataType, (int) regionSize, 1);
//blocks.add(block, (int) regionSize,
// "MemoryBlock_" + Long.toHexString(minfo.getBaseAddress()), null);
rva += regionSize;
offset += dt.getLength();
}
ArrayDataType ainfo = new ArrayDataType(dt, header.getMemoryRegionCount(), dt.getLength());
data.add(new DumpData(start, ainfo, "MEMORY_INFO", false, true));
data.add(new DumpData(regionOffset, blocks, "MEMORY_BLOCKS", false, true));
offset = header.getDebugEventOffset();
DebugInfo debugInfo = new DebugInfo(reader, offset);
data.add(new DumpData(offset, debugInfo.toDataType(), "DEBUG_EVENT_INFO", false, true));
processId = debugInfo.getProcessId();
threadId = debugInfo.getThreadId();
addProcess(processId, "TARGET", 0);
addThread(processId, threadId, 0);
}
public static String getMachineType(DumpFileReader reader) throws IOException {
UserdumpFileHeader header = new UserdumpFileHeader(reader, 0L);
return Integer.toHexString(header.getMachineImageType());
}
public void analyze(TaskMonitor monitor) {
boolean analyzeEmbeddedObjects =
OptionUtils.getBooleanOptionValue(DumpFileLoader.ANALYZE_EMBEDDED_OBJECTS_OPTION_NAME,
options,
DumpFileLoader.ANALYZE_EMBEDDED_OBJECTS_OPTION_DEFAULT);
if (analyzeEmbeddedObjects) {
ModuleToPeHelper.queryModules(program, monitor);
}
}
}
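
A probing sketch, not part of the commit: before handing a file to the loader, the header can be checked against Userdump.SIGNATURE and its machine type reported via getMachineType(). The sketch class and method names are illustrative, and how the DumpFileReader is constructed is assumed to be handled elsewhere.

package ghidra.file.formats.dump.userdump;

import java.io.IOException;

import ghidra.file.formats.dump.DumpFileReader;

// Illustrative only: quick sanity check of a candidate userdump file.
public class UserdumpProbeSketch {
	static boolean looksLikeUserdump(DumpFileReader reader) throws IOException {
		UserdumpFileHeader header = new UserdumpFileHeader(reader, 0L);
		if (header.getSignature() != Userdump.SIGNATURE) {
			return false;
		}
		// Machine image type as a hex string, e.g. "8664" for x86-64
		System.out.println("machine=" + Userdump.getMachineType(reader));
		return true;
	}
}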

View File

@ -0,0 +1,217 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.file.formats.dump.userdump;
import java.io.IOException;
import ghidra.app.util.bin.StructConverter;
import ghidra.file.formats.dump.DumpFileReader;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;
public class UserdumpFileHeader implements StructConverter {
public final static String NAME = "USERDUMP_HEADER";
private int signature;
private int validDump;
private int majorVersion;
private int minorVersion;
private int machineImageType;
private int threadCount;
private int moduleCount;
private int memoryRegionCount;
private long threadOffset;
private long moduleOffset;
private long memoryRegionOffset;
private long memoryDescriptorOffset;
private long debugEventOffset;
private long threadStateOffset;
protected DumpFileReader reader;
protected long index;
private int psz;
UserdumpFileHeader(DumpFileReader reader, long index) throws IOException {
this.reader = reader;
this.index = index;
this.psz = reader.getPointerSize();
parse();
}
protected void parse() throws IOException {
reader.setPointerIndex(index);
setSignature(reader.readNextInt());
setValidDump(reader.readNextInt());
setMajorVersion(reader.readNextInt());
setMinorVersion(reader.readNextInt());
setMachineImageType(reader.readNextInt());
setThreadCount(reader.readNextInt());
setModuleCount(reader.readNextInt());
setMemoryRegionCount(reader.readNextInt());
setThreadOffset(readNextPointer());
setModuleOffset(readNextPointer());
setMemoryRegionOffset(readNextPointer());
setMemoryDescriptorOffset(readNextPointer());
setDebugEventOffset(readNextPointer());
setThreadStateOffset(readNextPointer());
}
/**
* @see ghidra.app.util.bin.StructConverter#toDataType()
*/
public DataType toDataType() throws DuplicateNameException {
Structure struct = new StructureDataType(NAME, 0);
struct.add(STRING, 4, "Signature", null);
struct.add(STRING, 4, "ValidDump", null);
struct.add(DWORD, 4, "MajorVersion", null);
struct.add(DWORD, 4, "MinorVersion", null);
struct.add(DWORD, 4, "MachineImageType", null);
struct.add(DWORD, 4, "NumberOfThreads", null);
struct.add(DWORD, 4, "ModuleCount", null);
struct.add(DWORD, 4, "MemoryRegionCount", null);
struct.add(POINTER, psz, "ThreadContextOffset", null);
struct.add(POINTER, psz, "ModulesOffset", null);
struct.add(POINTER, psz, "MemoryRegionOffset", null);
struct.add(POINTER, psz, "MemoryDescriptorOffset", null);
struct.add(POINTER, psz, "DebugEventOffset", null);
struct.add(POINTER, psz, "ThreadStateOffset", null);
struct.setCategoryPath(new CategoryPath("/UDMP"));
return struct;
}
public int getSignature() {
return signature;
}
public void setSignature(int signature) {
this.signature = signature;
}
public int getValidDump() {
return validDump;
}
public void setValidDump(int validDump) {
this.validDump = validDump;
}
public int getMajorVersion() {
return majorVersion;
}
public void setMajorVersion(int majorVersion) {
this.majorVersion = majorVersion;
}
public int getMinorVersion() {
return minorVersion;
}
public void setMinorVersion(int minorVersion) {
this.minorVersion = minorVersion;
}
public int getMachineImageType() {
return machineImageType;
}
public void setMachineImageType(int machineImageType) {
this.machineImageType = machineImageType;
}
public int getThreadCount() {
return threadCount;
}
public void setThreadCount(int threadCount) {
this.threadCount = threadCount;
}
public int getModuleCount() {
return moduleCount;
}
public void setModuleCount(int moduleCount) {
this.moduleCount = moduleCount;
}
public int getMemoryRegionCount() {
return memoryRegionCount;
}
public void setMemoryRegionCount(int memoryRegionCount) {
this.memoryRegionCount = memoryRegionCount;
}
public long getThreadOffset() {
return threadOffset;
}
public void setThreadOffset(long threadOffset) {
this.threadOffset = threadOffset;
}
public long getModuleOffset() {
return moduleOffset;
}
public void setModuleOffset(long moduleOffset) {
this.moduleOffset = moduleOffset;
}
public long getMemoryRegionOffset() {
return memoryRegionOffset;
}
public void setMemoryRegionOffset(long memoryRegionOffset) {
this.memoryRegionOffset = memoryRegionOffset;
}
public long getMemoryDescriptorOffset() {
return memoryDescriptorOffset;
}
public void setMemoryDescriptorOffset(long memoryDescriptorOffset) {
this.memoryDescriptorOffset = memoryDescriptorOffset;
}
public long getDebugEventOffset() {
return debugEventOffset;
}
public void setDebugEventOffset(long debugEventOffset) {
this.debugEventOffset = debugEventOffset;
}
public long getThreadStateOffset() {
return threadStateOffset;
}
public void setThreadStateOffset(long threadStateOffset) {
this.threadStateOffset = threadStateOffset;
}
	private long readNextPointer() throws IOException {
		// Treat 4-byte offsets as unsigned to avoid sign extension for large dumps
		return psz == 4 ? Integer.toUnsignedLong(reader.readNextInt()) : reader.readNextLong();
	}
}
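
A layout sketch, not part of the commit: the header records one offset per section, and these are the offsets Userdump.buildStructures() walks. The sketch class and method names are illustrative; the reader is assumed to be positioned over a valid userdump.

package ghidra.file.formats.dump.userdump;

import java.io.IOException;

import ghidra.file.formats.dump.DumpFileReader;

// Illustrative only: prints the section offsets recorded in the header.
public class HeaderLayoutSketch {
	static void printLayout(DumpFileReader reader) throws IOException {
		UserdumpFileHeader h = new UserdumpFileHeader(reader, 0L);
		System.out.printf("thread contexts: %d at %x%n", h.getThreadCount(), h.getThreadOffset());
		System.out.printf("thread state:    at %x%n", h.getThreadStateOffset());
		System.out.printf("modules:         %d at %x%n", h.getModuleCount(), h.getModuleOffset());
		System.out.printf("memory info:     %d at %x%n", h.getMemoryRegionCount(),
			h.getMemoryDescriptorOffset());
		System.out.printf("debug event:     at %x%n", h.getDebugEventOffset());
		System.out.printf("raw memory:      %x .. %x%n", h.getMemoryRegionOffset(), reader.length());
	}
}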

View File

@ -204,6 +204,7 @@ public class ProgramDB extends DomainObjectAdapterDB implements Program, ChangeM
private LanguageTranslator languageUpgradeTranslator;
private boolean imageBaseOverride = false;
private Address effectiveImageBase = null;
private boolean recordChanges;
private OverlaySpaceAdapterDB overlaySpaceAdapter;
@ -1264,9 +1265,18 @@ public class ProgramDB extends DomainObjectAdapterDB implements Program, ChangeM
@Override
public Address getImageBase() {
if (effectiveImageBase != null) {
return effectiveImageBase;
}
return addrMap.getImageBase();
}
// TODO: We need a more global solution for this.
@Deprecated
public void setEffectiveImageBase(Address imageBase) {
effectiveImageBase = imageBase;
}
@Override
public void setImageBase(Address base, boolean commit)
throws AddressOverflowException, LockException, IllegalStateException {

View File

@ -1015,7 +1015,6 @@ public class MemoryMapDB implements Memory, ManagerDB, LiveMemoryListener {
MemoryBlock newBlock = null;
try {
memBlock1.join(memBlock2);
reloadAll();
newBlock = getBlockDB(block1Addr);
fireBlocksJoined(newBlock, block2Addr);
}