GT-2895 changes from first review

This commit is contained in:
ghizard 2019-06-17 14:46:24 -04:00
parent 26a36ab643
commit 43fb55ffb7
55 changed files with 1246 additions and 1420 deletions

View file

@ -22,10 +22,11 @@ import generic.io.NullWriter;
import ghidra.framework.Application;
/**
* A utility class providing logging PDB parsing and analyzing data and metrics for the purposes
* of debugging and aiding in continued research and development of this package.
* A utility class providing logging for: PDB parsing and PDB analysis. It includes data and
* metrics for the purposes of debugging and aiding in continued research and development of
* this package.
*/
public class PdbMessageLog {
public class PdbLog {
private static File logFile;
private static Writer nullWriter = new NullWriter();
@ -61,10 +62,9 @@ public class PdbMessageLog {
* uses a format string and a variable arguments list of lambdas to allow for deferred
* processing of the message to output. Thus, when message output is disabled, the client
* does not endure as much cost in supplying a message string that is not used.
* Note: User must supply appropriate new lines.
* @param format a {@link String} format list as would be used to a printf() function, but
* which must only specify {@code %s} {@link String} outputs.
* @param suppliers variable number of {@link Supplier}<{@link String}> arguments. The
* @param suppliers variable number of {@link Supplier}&lt;{@link String}&gt; arguments. The
* number must match the number of {@code %s} outputs in the format string.
* @throws IOException upon problem with {@link Writer#append(CharSequence)} or
* {@link Writer#flush()}.
@ -85,17 +85,17 @@ public class PdbMessageLog {
varArgs[i] = var;
}
writer.append(String.format(format, varArgs));
writer.append("\n");
writer.flush();
}
/**
* Outputs a message to the PDB log if messaging has been enabled, else ignored. This method
* uses a {@link Supplier}<{@link String}> to allow for deferred processing of the message
* uses a {@link Supplier}&lt;{@link String}&gt; to allow for deferred processing of the message
* to output. Thus, when message output is disabled, the client does not endure as much cost
* in supplying a message string that is not used.
* Note: User must supply appropriate new lines.
* @param supplier a {@link Supplier}<{@link String}> that supplies a {@link String} message
* to be output.
* @param supplier a {@link Supplier}&lt;{@link String}&gt; that supplies a {@link String}
* message to be output.
* @throws IOException upon problem with {@link Writer#append(CharSequence)} or
* {@link Writer#flush()}.
* @see #setEnabled(boolean)
@ -106,23 +106,20 @@ public class PdbMessageLog {
}
writer.append(supplier.get());
writer.append("\n");
writer.flush();
}
/**
* Outputs a {@link String} message to the PDB log if messaging has been enabled, else ignored.
* Note: User must supply appropriate new lines.
* @param message a {@link String} message to be output.
* @throws IOException upon problem with {@link Writer#append(CharSequence)} or
* {@link Writer#flush()}.
* @see #setEnabled(boolean)
*/
public static void message(String message) throws IOException {
if (!enabled) {
return;
}
writer.append(message);
writer.append("\n");
writer.flush();
}
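
The logging methods above defer message construction: the Supplier arguments are evaluated only when logging is enabled, so a disabled log costs the caller almost nothing. A minimal standalone sketch of the format-plus-suppliers variant; the class and method names below are illustrative, not the actual PdbLog API.

import java.util.function.Supplier;

// Minimal sketch of deferred logging with a format string and Supplier varargs (illustrative only).
public class DeferredFormatLogSketch {
    private static boolean enabled = false;

    @SafeVarargs
    static void message(String format, Supplier<String>... suppliers) {
        if (!enabled) {
            return; // no supplier is evaluated and no string is formatted
        }
        Object[] args = new Object[suppliers.length];
        for (int i = 0; i < suppliers.length; i++) {
            args[i] = suppliers[i].get();
        }
        System.out.println(String.format(format, args));
    }

    public static void main(String[] args) {
        message("first: %s, second: %s", () -> "cheap", () -> buildExpensiveDump());
        enabled = true;
        message("first: %s, second: %s", () -> "cheap", () -> buildExpensiveDump()); // suppliers run now
    }

    private static String buildExpensiveDump() {
        return "pretend this walks a large buffer";
    }
}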
@ -135,10 +132,6 @@ public class PdbMessageLog {
fileWriter.close();
fileWriter = null;
}
if (nullWriter != null) {
nullWriter.close();
nullWriter = null;
}
}
/**
@ -148,6 +141,11 @@ public class PdbMessageLog {
*/
private static Writer createFileWriter() throws IOException {
/*
* Since we want this logging to be used sparingly and on a case-by-case basis, we
* delete the log at the start of each JVM session. New log writing always uses the
* same log file name with no date or process ID attributes.
*/
logFile = new File(Application.getUserSettingsDirectory(), "pdb.analyzer.log");
if (logFile.exists()) {
logFile.delete();
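
The comment above states the log-file policy: delete any previous log when the JVM session starts and always write to the same fixed file name. A small sketch of that delete-then-recreate step, assuming the caller supplies the settings directory (the real method uses Application.getUserSettingsDirectory()).

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

// Sketch: start each JVM session with a fresh, fixed-name log file (no date or PID suffix).
public class FreshLogFileSketch {
    public static Writer createFileWriter(File settingsDirectory) throws IOException {
        File logFile = new File(settingsDirectory, "pdb.analyzer.log"); // same name every run
        if (logFile.exists() && !logFile.delete()) {
            throw new IOException("Could not delete previous log: " + logFile);
        }
        return new BufferedWriter(new FileWriter(logFile));
    }

    public static void main(String[] args) throws IOException {
        try (Writer writer = createFileWriter(new File(System.getProperty("java.io.tmpdir")))) {
            writer.write("log restarted\n");
        }
    }
}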

View file

@ -17,8 +17,7 @@ package ghidra.pdb.msfreader;
import java.io.IOException;
import java.io.RandomAccessFile;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.PdbException;
import ghidra.pdb.pdbreader.PdbReaderOptions;
@ -31,9 +30,6 @@ import ghidra.util.task.TaskMonitor;
*/
public class MsfParser {
//==============================================================================================
// API
//==============================================================================================
/**
* Detects, creates, and returns the appropriate {@link AbstractMsf} object found for
* the filename given.
@ -45,11 +41,11 @@ public class MsfParser {
* @throws PdbException If an appropriate object cannot be created.
* @throws CancelledException Upon user cancellation.
*/
public static AbstractMsf parse(String filename, PdbReaderOptions pdbOptions, TaskMonitor monitor)
throws IOException, PdbException, CancelledException {
Validate.notNull(filename, "filename cannot be null)");
Validate.notNull(pdbOptions, "pdbOptions cannot be null)");
Validate.notNull(monitor, "monitor cannot be null)");
public static AbstractMsf parse(String filename, PdbReaderOptions pdbOptions,
TaskMonitor monitor) throws IOException, PdbException, CancelledException {
Objects.requireNonNull(filename, "filename cannot be null");
Objects.requireNonNull(pdbOptions, "pdbOptions cannot be null");
Objects.requireNonNull(monitor, "monitor cannot be null");
AbstractMsf msf;
RandomAccessFile file = new RandomAccessFile(filename, "r");
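
This hunk (and many like it below) replaces org.apache.commons.lang3.Validate.notNull with the JDK's Objects.requireNonNull, removing the Commons Lang import and the stray ')' in the old messages. A small self-contained illustration of the JDK call; the class here is made up.

import java.util.Objects;

// Illustration of the JDK null check adopted throughout this commit.
public class NullCheckSketch {
    private final String filename;

    NullCheckSketch(String filename) {
        // Throws NullPointerException carrying the message when filename is null.
        this.filename = Objects.requireNonNull(filename, "filename cannot be null");
    }

    public static void main(String[] args) {
        new NullCheckSketch("example.pdb"); // fine
        try {
            new NullCheckSketch(null);
        }
        catch (NullPointerException e) {
            System.out.println(e.getMessage()); // prints: filename cannot be null
        }
    }
}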

View file

@ -19,8 +19,6 @@ import java.io.IOException;
import java.io.Writer;
import java.util.*;
import org.apache.commons.lang3.Validate;
import ghidra.pdb.PdbByteReader;
import ghidra.pdb.PdbException;
import ghidra.pdb.pdbreader.symbol.AbstractMsSymbol;
@ -86,7 +84,7 @@ public abstract class AbstractDatabaseInterface {
* @param streamNumber The stream number of the stream containing the Database Interface.
*/
public AbstractDatabaseInterface(AbstractPdb pdb, int streamNumber) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
this.streamNumber = streamNumber;
globalSymbolInformation = new GlobalSymbolInformation(pdb);
@ -175,22 +173,22 @@ public abstract class AbstractDatabaseInterface {
}
/**
* Returns the list of regular symbols.
* @return {@link Map}<{@link Long},{@link AbstractMsSymbol}> of buffer offsets to
* Returns the map of combined global/public symbols.
* @return {@link Map}&lt;{@link Long},{@link AbstractMsSymbol}&gt; of buffer offsets to
* symbols.
*/
public Map<Long, AbstractMsSymbol> getSymbolMap() {
return symbolRecords.getSymbolMap();
public Map<Long, AbstractMsSymbol> getSymbolsByOffset() {
return symbolRecords.getSymbolsByOffset();
}
/**
* Returns the buffer-offset-to-symbol map for the module as specified by moduleNumber.
* @param moduleNumber The number ID of the module for which to return the list.
* @return {@link Map}<{@link Long},{@link AbstractMsSymbol}> of buffer offsets to
* @return {@link Map}&lt;{@link Long},{@link AbstractMsSymbol}&gt; of buffer offsets to
* symbols for the specified module.
*/
public Map<Long, AbstractMsSymbol> getModuleSymbolMap(int moduleNumber) {
return symbolRecords.getModuleSymbolMap(moduleNumber);
public Map<Long, AbstractMsSymbol> getModuleSymbolsByOffset(int moduleNumber) {
return symbolRecords.getModuleSymbolsByOffset(moduleNumber);
}
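
The javadoc edits above (and throughout the commit) replace raw < and > in generic types with &lt; and &gt; so the javadoc tool does not interpret them as HTML tags. A tiny hypothetical example of the escaped form; wrapping the whole type in {@code ...} is an alternative.

import java.util.Collections;
import java.util.Map;

public class JavadocGenericsSketch {
    /**
     * Returns names keyed by buffer offset.
     * @return {@link Map}&lt;{@link Long},{@link String}&gt; of buffer offsets to names.
     */
    public Map<Long, String> getNamesByOffset() {
        return Collections.singletonMap(0L, "example");
    }

    public static void main(String[] args) {
        System.out.println(new JavadocGenericsSketch().getNamesByOffset());
    }
}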
/**
@ -436,14 +434,13 @@ public abstract class AbstractDatabaseInterface {
assert false;
}
int numRefs = substreamReader.parseUnsignedShortVal();
int x = 0;
for (int i = 0; i < numInformationModules; i++) {
for (int i = 0, x = 0; i < numInformationModules; i++) {
monitor.checkCanceled();
int refIndex = substreamReader.parseUnsignedShortVal();
AbstractModuleInformation module = moduleInformationList.get(i);
int num = module.getNumFilesContributing();
if (refIndex != x) {
assert false;
throw new PdbException("Corrupt file information data");
}
x += num;
}

View file

@ -23,14 +23,8 @@ import ghidra.pdb.*;
*/
public abstract class AbstractOffset extends AbstractParsableItem {
//==============================================================================================
// Internals
//==============================================================================================
protected int offsetVal;
//==============================================================================================
// API
//==============================================================================================
/**
* Returns the offset value.
* @return Offset value. Defaults to zero if not parsed.
@ -48,9 +42,6 @@ public abstract class AbstractOffset extends AbstractParsableItem {
offsetVal = doParse(reader);
}
//==============================================================================================
// Abstract Methods
//==============================================================================================
/**
* Parses the offset value from the {@link PdbByteReader}.
* @param reader {@link PdbByteReader}.

View file

@ -88,7 +88,7 @@ public abstract class AbstractPdb implements AutoCloseable {
private TypeParser typeParser;
private SymbolParser symbolParser;
//==============================================================================================
private Stack<CategoryIndex> dependencyStack = new Stack<>();
private Deque<CategoryIndex> dependencyStack = new ArrayDeque<>();
private DependencyGraph<CategoryIndex> dependencyGraph = new DependencyGraph<>();
private List<CategoryIndex> orderedDependenyIndices = new ArrayList<>();
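
The dependency stack switches from java.util.Stack to Deque/ArrayDeque, the collection the JDK documentation recommends for LIFO use since Stack extends the synchronized Vector. A short sketch of ArrayDeque used as a stack, with made-up element values.

import java.util.ArrayDeque;
import java.util.Deque;

// Sketch: ArrayDeque as a LIFO stack in place of java.util.Stack.
public class DequeStackSketch {
    public static void main(String[] args) {
        Deque<String> dependencyStack = new ArrayDeque<>();
        dependencyStack.push("DATA:0x1003");
        dependencyStack.push("SYMBOL:0x0007");
        System.out.println(dependencyStack.peek()); // SYMBOL:0x0007 (last pushed)
        System.out.println(dependencyStack.pop());  // SYMBOL:0x0007
        System.out.println(dependencyStack.pop());  // DATA:0x1003
    }
}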
@ -238,6 +238,14 @@ public abstract class AbstractPdb implements AutoCloseable {
return targetProcessor;
}
/**
* Returns whether there is minimal debug information.
* @return {@code true} if there is minimal debug information.
*/
public boolean hasMinimalDebugInfo() {
return minimalDebugInfo;
}
/**
* Set the index number of the target processor used for compilation.
* @param targetProcessorIn Processor identifier.
@ -381,10 +389,10 @@ public abstract class AbstractPdb implements AutoCloseable {
}
/**
* Returns the {@link DependencyGraph}<{@link CategoryIndex}>. Dependency order is not
* Returns the {@link DependencyGraph}&lt;{@link CategoryIndex}&gt;. Dependency order is not
* a PDB feature. It is something we added (and might be removed in the future) as we
* have investigated how to analyze and apply the PDB.
* @return {@link DependencyGraph}<{@link CategoryIndex}>.
* @return {@link DependencyGraph}&lt;{@link CategoryIndex}&gt;.
*/
public DependencyGraph<CategoryIndex> getDependencyGraphCopy() {
return dependencyGraph.copy();
@ -426,9 +434,10 @@ public abstract class AbstractPdb implements AutoCloseable {
}
/**
* Method to be called during the creation of {@link DependencyGraph}<{@link CategoryIndex}>
* whenever a node (see {@link #pushDependencyStack(CategoryIndex)}) will have no more
* dependents of its own (or is done being processed).
* Method to be called during the creation of
* {@link DependencyGraph}&lt;{@link CategoryIndex}&gt; whenever a node
* (see {@link #pushDependencyStack(CategoryIndex)}) will have no more dependents of its own
* (or is done being processed).
*/
public void popDependencyStack() {
dependencyStack.pop();
@ -464,8 +473,8 @@ public abstract class AbstractPdb implements AutoCloseable {
}
/**
* Dumps the {@link DependencyGraph}<{@link CategoryIndex}> information. The
* {@link DependencyGraph}<{@link CategoryIndex}> is not a PDB feature. It is
* Dumps the {@link DependencyGraph}&lt;{@link CategoryIndex}&gt; information. The
* {@link DependencyGraph}&lt;{@link CategoryIndex}&gt; is not a PDB feature. It is
* something we added (and might be removed in the future) as we have investigated how to
* analyze and apply the PDB. This package-protected method is for debugging only.
* @return {@link String} of pretty output.
@ -507,9 +516,9 @@ public abstract class AbstractPdb implements AutoCloseable {
/**
* Dumps the Dependency Order information as found in the
* {@link DependencyGraph}<{@link CategoryIndex}>. Dependency order is not a PDB feature.
* It is something we added (and might be removed in the future) as we have investigated
* how to analyze and apply the PDB. This package-protected method is for debugging only.
* {@link DependencyGraph}&lt;{@link CategoryIndex}&gt;. Dependency order is not a PDB feature.
* It is something we added (and might be removed in the future) as we have investigated
* how to analyze and apply the PDB. This package-protected method is for debugging only.
* @return {@link String} of pretty output.
*/
protected String dumpDependencyOrder() {

View file

@ -15,7 +15,7 @@
*/
package ghidra.pdb.pdbreader;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.*;
@ -25,22 +25,15 @@ import ghidra.pdb.*;
*/
public abstract class AbstractString extends AbstractParsableItem {
//==============================================================================================
// Internals
//==============================================================================================
protected AbstractPdb pdb;
protected String string = "";
//==============================================================================================
// API
//==============================================================================================
/**
* Constructor.
* @param pdb {@link AbstractPdb} to which this type belongs.
*/
public AbstractString(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
}
@ -66,9 +59,6 @@ public abstract class AbstractString extends AbstractParsableItem {
string = doParse(reader);
}
//==============================================================================================
// Abstract Methods
//==============================================================================================
/**
* Parses the string from the {@link PdbByteReader}.
* @param reader {@link PdbByteReader}.

View file

@ -23,14 +23,8 @@ import ghidra.pdb.*;
*/
public abstract class AbstractTypeIndex extends AbstractParsableItem {
//==============================================================================================
// Internals
//==============================================================================================
protected int indexVal;
//==============================================================================================
// API
//==============================================================================================
/**
* Returns the index value.
* @return The value of the index. Defaults to zero if not parsed.
@ -48,9 +42,6 @@ public abstract class AbstractTypeIndex extends AbstractParsableItem {
indexVal = doParse(reader);
}
//==============================================================================================
// Abstract Methods
//==============================================================================================
/**
* Parses the index value from the {@link PdbByteReader}.
* @param reader {@link PdbByteReader} from which to read the value.

View file

@ -19,8 +19,6 @@ import java.io.IOException;
import java.io.Writer;
import java.util.*;
import org.apache.commons.lang3.Validate;
import ghidra.pdb.PdbByteReader;
import ghidra.pdb.PdbException;
import ghidra.pdb.pdbreader.type.AbstractMsType;
@ -55,7 +53,7 @@ public abstract class AbstractTypeProgramInterface {
protected int typeIndexMaxExclusive;
protected int dataLength;
protected Map<Integer, PrimitiveMsType> primitivesMap = new HashMap<>();
protected Map<Integer, PrimitiveMsType> primitiveTypesByRecordNumber = new HashMap<>();
protected List<AbstractMsType> typeList = new ArrayList<>();
protected int versionNumber = 0;
@ -73,7 +71,7 @@ public abstract class AbstractTypeProgramInterface {
* {@link AbstractTypeProgramInterface} data.
*/
public AbstractTypeProgramInterface(AbstractPdb pdb, int streamNumber) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
this.streamNumber = streamNumber;
}
@ -133,10 +131,10 @@ public abstract class AbstractTypeProgramInterface {
*/
public AbstractMsType getRecord(int recordNumber) {
if (recordNumber < typeIndexMin) {
PrimitiveMsType primitive = primitivesMap.get(recordNumber);
PrimitiveMsType primitive = primitiveTypesByRecordNumber.get(recordNumber);
if (primitive == null) {
primitive = new PrimitiveMsType(pdb, recordNumber);
primitivesMap.put(recordNumber, primitive);
primitiveTypesByRecordNumber.put(recordNumber, primitive);
}
return primitive;
}
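
getRecord(...) above lazily creates and caches a PrimitiveMsType the first time a primitive record number is requested. The same get-or-create-and-cache step could also be written with Map.computeIfAbsent; a sketch under that assumption, with a plain String standing in for the type object.

import java.util.HashMap;
import java.util.Map;

// Sketch: lazily cache objects keyed by record number, as getRecord(...) does for primitives.
public class LazyCacheSketch {
    private final Map<Integer, String> primitiveTypesByRecordNumber = new HashMap<>();

    String getRecord(int recordNumber) {
        // computeIfAbsent creates and stores the value only on the first request.
        return primitiveTypesByRecordNumber.computeIfAbsent(recordNumber,
            number -> "primitive type " + number);
    }

    public static void main(String[] args) {
        LazyCacheSketch cache = new LazyCacheSketch();
        System.out.println(cache.getRecord(3));  // created now
        System.out.println(cache.getRecord(3));  // returned from the cache
    }
}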
@ -194,7 +192,7 @@ public abstract class AbstractTypeProgramInterface {
* @param typeIndexMaxExclusive One greater than the MaxIndex to set/use.
*/
AbstractTypeProgramInterface(AbstractPdb pdb, int typeIndexMin, int typeIndexMaxExclusive) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
this.typeIndexMin = typeIndexMin;
this.typeIndexMaxExclusive = typeIndexMaxExclusive;

View file

@ -33,15 +33,9 @@ public class CategoryIndex implements Comparable<CategoryIndex> {
DATA, ITEM, SYMBOL
}
//==============================================================================================
// Internals
//==============================================================================================
private Category category;
private int index;
//==============================================================================================
// API
//==============================================================================================
/**
* Constructor.
* @param category {@link CategoryIndex.Category} to be assigned.

View file

@ -71,10 +71,10 @@ class DatabaseInterface extends AbstractDatabaseInterface {
// TODO: evaluate. I don't think we need GlobalSymbolInformation (hash) or the
// PublicSymbolInformation (hash), as they are both search mechanisms.
symbolRecords.deserialize(monitor);
globalSymbolInformation.deserialize(
pdb.databaseInterface.getGlobalSymbolsHashMaybeStreamNumber(), false, monitor);
publicSymbolInformation.deserialize(
pdb.databaseInterface.getPublicStaticSymbolsHashMaybeStreamNumber(), true, monitor);
globalSymbolInformation.deserialize(getGlobalSymbolsHashMaybeStreamNumber(), false,
monitor);
publicSymbolInformation.deserialize(getPublicStaticSymbolsHashMaybeStreamNumber(), true,
monitor);
//TODO: SectionContributions has information about code sections and refers to
// debug streams for each.
}

View file

@ -34,6 +34,8 @@ public class DatabaseInterfaceNew extends AbstractDatabaseInterface {
//==============================================================================================
// Internals
//==============================================================================================
private static final long HEADER_MAGIC = 0xeffeeffeL;
protected Hasher hasher; //Might belong in parent? Used in parent (even older Hasher?)
// The source of these values can overlay other fields in older versions of this type.
@ -144,10 +146,10 @@ public class DatabaseInterfaceNew extends AbstractDatabaseInterface {
// TODO: evaluate. I don't think we need GlobalSymbolInformation (hash) or the
// PublicSymbolInformation (hash), as they are both search mechanisms.
symbolRecords.deserialize(monitor);
globalSymbolInformation.deserialize(
pdb.databaseInterface.getGlobalSymbolsHashMaybeStreamNumber(), false, monitor);
publicSymbolInformation.deserialize(
pdb.databaseInterface.getPublicStaticSymbolsHashMaybeStreamNumber(), true, monitor);
globalSymbolInformation.deserialize(getGlobalSymbolsHashMaybeStreamNumber(), false,
monitor);
publicSymbolInformation.deserialize(getPublicStaticSymbolsHashMaybeStreamNumber(), true,
monitor);
//TODO: Process further information that might be found from ProcessTypeServerMap,
// and processEditAndContinueInformation.
debugData.deserialize(monitor);
@ -286,7 +288,7 @@ public class DatabaseInterfaceNew extends AbstractDatabaseInterface {
//System.out.println(substreamReader.dump(0x1000));
long hdr = substreamReader.parseUnsignedIntVal();
int ver = substreamReader.parseInt(); // spec says unsigned, but I believe small vals.
if (hdr != 0xeffeeffeL) {
if (hdr != HEADER_MAGIC) {
return; //For now... we are not going to try to populate this with a conversion.
}
switch (ver) {
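
The literal 0xeffeeffe is hoisted into the HEADER_MAGIC constant so the header comparison reads as intent rather than a bare number. A trivial standalone sketch of the same idea.

// Sketch: naming a magic value instead of comparing against a literal.
public class HeaderMagicSketch {
    private static final long HEADER_MAGIC = 0xeffeeffeL;

    static boolean hasValidHeader(long headerValue) {
        return headerValue == HEADER_MAGIC; // intent is clear at the call site
    }

    public static void main(String[] args) {
        System.out.println(hasValidHeader(0xeffeeffeL)); // true
        System.out.println(hasValidHeader(0x12345678L)); // false
    }
}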

View file

@ -66,7 +66,7 @@ public class DatabaseInterfaceParser {
// In support of debug.
debugReader = reader;
PdbMessageLog.message(this::debug1);
PdbLog.message(this::debugDump);
int headerSignature = reader.parseInt();
int versionNumber = reader.parseInt();
@ -91,7 +91,7 @@ public class DatabaseInterfaceParser {
return databaseInterface;
}
private String debug1() {
private String debugDump() {
return "DatabaseInterfaceParser data on stream " + getStreamNumber() + ":\n" +
debugReader.dump() + "\n";
}
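
PdbLog.message(this::debugDump) passes the dump method as a Supplier<String>, so the potentially large dump string is only built when logging is enabled. A standalone sketch of a method reference used that way; the names are illustrative, not the real PdbLog or parser API.

import java.util.function.Supplier;

// Sketch: a method reference used as a Supplier for deferred debug output.
public class MethodRefLogSketch {
    private static boolean enabled = false;

    static void message(Supplier<String> supplier) {
        if (enabled) {
            System.out.println(supplier.get());
        }
    }

    private String debugDump() {
        return "expensive dump built only when logging is enabled";
    }

    public static void main(String[] args) {
        MethodRefLogSketch parser = new MethodRefLogSketch();
        message(parser::debugDump); // logging is off, debugDump() never runs
        enabled = true;
        message(parser::debugDump); // now debugDump() actually runs and is printed
    }
}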

View file

@ -17,10 +17,7 @@ package ghidra.pdb.pdbreader;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.Validate;
import java.util.*;
import ghidra.pdb.PdbByteReader;
import ghidra.pdb.PdbException;
@ -87,12 +84,12 @@ public class DebugData {
* @param pdb {@link AbstractPdb} that owns this {@link DebugData}.
*/
public DebugData(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
}
/**
* Returns the {@link List}<{@link ImageSectionHeader}>.
* Returns the {@link List}&lt;{@link ImageSectionHeader}&gt;.
* @return the imageSectionHeaders.
*/
public List<ImageSectionHeader> getImageSectionHeaders() {

View file

@ -24,17 +24,10 @@ import ghidra.pdb.PdbException;
*/
public class DebugHeader {
//==============================================================================================
// Internals
//==============================================================================================
private long headerVersion;
private long headerLength;
private long dataLength;
//==============================================================================================
// API
//==============================================================================================
/**
* Returns the version of the header.
* @return the header version.

View file

@ -40,9 +40,6 @@ public class DenseIntegerArray {
List<Integer> array = new ArrayList<>();
//==============================================================================================
// API
//==============================================================================================
/**
* Deserializes this {@link DenseIntegerArray}.
* @param reader {@link PdbByteReader} from which to deserialize the data.

View file

@ -67,8 +67,8 @@ public class GlobalSymbolInformation {
List<Integer> hashBucketOffsets = new ArrayList<>();
Set<SymbolHashRecord> hashRecords = new TreeSet<>();
List<Integer> symbolOffsets = new ArrayList<>();
Map<Integer, Integer> mapTableOffsetToTargetOffset = new HashMap<>();
Map<Integer, Integer> sectionNumToAbsoluteOffset = new HashMap<>();
Map<Integer, Integer> targetOffsetsByTableOffset = new HashMap<>();
Map<Integer, Integer> absoluteOffsetsBySectionNumber = new HashMap<>();
List<AbstractMsSymbol> symbols = new ArrayList<>();
@ -159,7 +159,7 @@ public class GlobalSymbolInformation {
*/
void deserialize(int streamNumber, boolean pub, TaskMonitor monitor)
throws IOException, PdbException, CancelledException {
if (pdb.minimalDebugInfo) {
if (pdb.hasMinimalDebugInfo()) {
hashRecordsBitMapLength = 0x8000;
numExtraBytes = 0; // I believe;
numHashRecords = 0x3ffff;
@ -184,7 +184,7 @@ public class GlobalSymbolInformation {
PdbByteReader thunkMapReader = reader.getSubPdbByteReader(thunkMapLength);
deserializeThunkMap(thunkMapReader, monitor);
/**
/*
* See note in {@link #deserializePubHeader(PdbByteReader)} regarding spurious data
* for numSections. Because of this, we will assume the rest of the data in the
* reader belongs to the section map and set the appropriate variable values here.
@ -268,7 +268,7 @@ public class GlobalSymbolInformation {
monitor.checkCanceled();
int targetOffset = reader.parseInt();
int mapTableOffset = count * thunkSize + offsetThunkTable;
mapTableOffsetToTargetOffset.put(mapTableOffset, targetOffset);
targetOffsetsByTableOffset.put(mapTableOffset, targetOffset);
}
}
@ -286,7 +286,7 @@ public class GlobalSymbolInformation {
int offset = reader.parseInt();
int section = reader.parseUnsignedShortVal();
reader.skip(2); // padding
sectionNumToAbsoluteOffset.put(section, offset);
absoluteOffsetsBySectionNumber.put(section, offset);
}
}
@ -354,7 +354,7 @@ public class GlobalSymbolInformation {
numSections = (int) val;
// Calculated values.
/**
/*
* We should calculate and store these as long values, but
* {@link #PdbByteReader.getSubPdbByteReader(int)} does not support long, so we are
* checking here and throwing exception if we cannot support it.
@ -455,6 +455,7 @@ public class GlobalSymbolInformation {
* by a flat set of hash buckets that will be set at those indices in the order provided.
* @param reader {@link PdbByteReader} containing the data buffer to process.
* @param monitor {@link TaskMonitor} used for checking cancellation.
* @throws PdbException Upon not enough data left to parse.
* @throws CancelledException Upon user cancellation.
*/
private void deserializedCompressedHashBuckets(PdbByteReader reader, TaskMonitor monitor)
@ -514,16 +515,16 @@ public class GlobalSymbolInformation {
/**
* Generates a list of symbols from the information that we have.
* @param monitor {@link TaskMonitor} used for checking cancellation.
* @throws PdbException Upon not enough data left to parse.
* @throws PdbException Upon PDB corruption.
* @throws CancelledException Upon user cancellation.
*/
private void generateSymbolsList(TaskMonitor monitor) throws PdbException, CancelledException {
symbols = new ArrayList<>();
Map<Long, AbstractMsSymbol> map = pdb.getDatabaseInterface().getSymbolMap();
Map<Long, AbstractMsSymbol> symbolsByOffset = pdb.getDatabaseInterface().getSymbolsByOffset();
for (SymbolHashRecord record : hashRecords) {
monitor.checkCanceled();
long offset = record.getOffset();
AbstractMsSymbol symbol = map.get(offset);
AbstractMsSymbol symbol = symbolsByOffset.get(offset);
if (symbol == null) {
throw new PdbException("PDB corrupted");
}
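
The renames in this file (targetOffsetsByTableOffset, absoluteOffsetsBySectionNumber, getSymbolsByOffset) follow a values-by-key naming convention, and generateSymbolsList resolves each hash record through the offset-keyed map, failing loudly when an offset is missing. A small sketch of that lookup with made-up data and a stand-in exception.

import java.util.Map;
import java.util.TreeMap;

// Sketch: an offset-keyed symbol map and a lookup that rejects missing offsets.
public class SymbolsByOffsetSketch {
    public static void main(String[] args) {
        Map<Long, String> symbolsByOffset = new TreeMap<>();
        symbolsByOffset.put(0x10L, "GPROC32 main");   // made-up entries
        symbolsByOffset.put(0x58L, "GDATA32 table");

        long offset = 0x58L;
        String symbol = symbolsByOffset.get(offset);
        if (symbol == null) {
            throw new IllegalStateException("PDB corrupted"); // stand-in for PdbException
        }
        System.out.println(symbol);
    }
}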

View file

@ -23,17 +23,10 @@ import ghidra.pdb.PdbException;
*/
public class ImageFunctionEntry {
//==============================================================================================
// Internals
//==============================================================================================
private long startingAddress;
private long endingAddress;
private long endOfPrologueAddress;
//==============================================================================================
// API
//==============================================================================================
/**
* Returns the starting address.
* @return the starting address.

View file

@ -17,8 +17,7 @@ package ghidra.pdb.pdbreader;
import java.io.IOException;
import java.io.Writer;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.PdbByteReader;
import ghidra.pdb.PdbException;
@ -32,20 +31,14 @@ import ghidra.pdb.PdbException;
*/
public class ImageSectionHeader {
//==============================================================================================
// Internals
//==============================================================================================
private AbstractPdb pdb;
//==============================================================================================
// API
//==============================================================================================
/**
* Constructor.
* @param pdb {@link AbstractPdb} to which this type belongs.
*/
public ImageSectionHeader(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
}

View file

@ -24,9 +24,6 @@ public class ItemProgramInterfaceParser extends TypeProgramInterfaceParser {
private static final int ITEM_PROGRAM_INTERFACE_STREAM_NUMBER = 4;
//==============================================================================================
// Internal Data Methods
//==============================================================================================
/**
* Returns the standard stream number that contains the serialized Item Program Interface.
* @return The standard stream number that contains the Item Program Interface.

View file

@ -23,17 +23,10 @@ import ghidra.pdb.PdbException;
*/
public class LinkerUnwindInfo {
//==============================================================================================
// Internals
//==============================================================================================
private int version; // unsigned short
private int flags; // unsigned short
private long dataLength; // unsigned int
//==============================================================================================
// API
//==============================================================================================
/**
* Returns the version.
* @return the version.

View file

@ -15,7 +15,7 @@
*/
package ghidra.pdb.pdbreader;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.PdbByteReader;
import ghidra.pdb.PdbException;
@ -34,7 +34,7 @@ public class ModuleInformation500 extends AbstractModuleInformation {
// API
//==============================================================================================
public ModuleInformation500(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
sectionContribution = new SectionContribution400();
}

View file

@ -15,7 +15,7 @@
*/
package ghidra.pdb.pdbreader;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.PdbByteReader;
import ghidra.pdb.PdbException;
@ -34,7 +34,7 @@ public class ModuleInformation600 extends AbstractModuleInformation {
// API
//==============================================================================================
public ModuleInformation600(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
sectionContribution = new SectionContribution600();
}

View file

@ -16,10 +16,7 @@
package ghidra.pdb.pdbreader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.Validate;
import java.util.*;
import ghidra.pdb.PdbByteReader;
import ghidra.pdb.PdbException;
@ -49,11 +46,11 @@ public class NameTable {
private DenseIntegerArray deletedList = new DenseIntegerArray();
private String[] names;
private int[] streamNumbers;
private Map<String, Integer> mapNameToStreamNumber = new HashMap<>();
private Map<Integer, String> mapStreamNumberToName = new HashMap<>();
private Map<Integer, Map<Integer, String>> mapStreamNumberToStringTable = new HashMap<>();
private Map<String, Integer> streamNumbersByName = new HashMap<>();
private Map<Integer, String> namesByStreamNumber = new HashMap<>();
private Map<Integer, Map<Integer, String>> stringTablesByStreamNumber = new HashMap<>();
private Map<Integer, String> namesMap;
private Map<Integer, String> namesByOffset;
//==============================================================================================
// API
@ -63,7 +60,7 @@ public class NameTable {
* @param pdb {@link AbstractPdb} that owns this Name Table.
*/
public NameTable(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
}
@ -73,7 +70,7 @@ public class NameTable {
* @return Name retrieved for the index.
*/
public String getNameFromStreamNumber(int index) {
return mapStreamNumberToName.get(index);
return namesByStreamNumber.get(index);
}
/**
@ -82,7 +79,7 @@ public class NameTable {
* @return Index of the name.
*/
public int getStreamNumberFromName(String name) {
Integer x = mapNameToStreamNumber.getOrDefault(name, -1);
Integer x = streamNumbersByName.getOrDefault(name, -1);
return x;
}
@ -93,10 +90,10 @@ public class NameTable {
* @return Name found at offset.
*/
public String getNameStringFromOffset(int offset) {
if (namesMap == null) {
if (namesByOffset == null) {
return null;
}
return namesMap.get(offset);
return namesByOffset.get(offset);
}
/**
@ -106,10 +103,10 @@ public class NameTable {
* @param name Name part of pair.
*/
public void forTestingOnlyAddOffsetNamePair(int offset, String name) {
if (namesMap == null) {
namesMap = new HashMap<>();
if (namesByOffset == null) {
namesByOffset = new HashMap<>();
}
namesMap.put(offset, name);
namesByOffset.put(offset, name);
}
@ -176,8 +173,8 @@ public class NameTable {
pdb.getPdbReaderOptions().getOneByteCharset());
streamNumbers[i] = streamNumber;
names[i] = name;
mapStreamNumberToName.put(streamNumber, name);
mapNameToStreamNumber.put(name, streamNumber);
namesByStreamNumber.put(streamNumber, name);
streamNumbersByName.put(name, streamNumber);
}
deserializeNameTableStreams(monitor);
}
@ -198,7 +195,7 @@ public class NameTable {
throws IOException, PdbException, CancelledException {
for (int streamNumber : streamNumbers) {
monitor.checkCanceled();
Map<Integer, String> mapOffsetToString = new HashMap<>();
Map<Integer, String> stringsByOffset = new HashMap<>();
PdbByteReader reader = pdb.getReaderForStreamNumber(streamNumber, monitor);
if (reader.getLimit() >= 12) {
long hdrMagic = reader.parseUnsignedIntVal();
@ -213,7 +210,7 @@ public class NameTable {
monitor.checkCanceled();
int offset = stringReader.getIndex();
String string = stringReader.parseNullTerminatedUtf8String();
mapOffsetToString.put(offset, string);
stringsByOffset.put(offset, string);
}
// TODO: ? process the rest of the data in reader ?
break;
@ -229,11 +226,11 @@ public class NameTable {
// TODO: unknown format
}
}
mapStreamNumberToStringTable.put(streamNumber, mapOffsetToString);
stringTablesByStreamNumber.put(streamNumber, stringsByOffset);
}
int namesStreamNumber = getStreamNumberFromName("/names");
namesMap = mapStreamNumberToStringTable.get(namesStreamNumber);
namesByOffset = stringTablesByStreamNumber.get(namesStreamNumber);
}
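
deserializeNameTableStreams records the reader index before each null-terminated string and uses it as the key of stringsByOffset. A self-contained sketch of that offset-keyed parse over a plain byte buffer; the buffer contents and layout here are made up, and the real stream also carries a header and length field.

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

// Sketch: build an offset-to-string map from a buffer of null-terminated UTF-8 strings.
public class StringsByOffsetSketch {
    public static Map<Integer, String> parse(byte[] buffer) {
        Map<Integer, String> stringsByOffset = new HashMap<>();
        int index = 0;
        while (index < buffer.length) {
            int start = index;                       // the key is where the string begins
            while (index < buffer.length && buffer[index] != 0) {
                index++;
            }
            stringsByOffset.put(start,
                new String(buffer, start, index - start, StandardCharsets.UTF_8));
            index++;                                 // skip the null terminator
        }
        return stringsByOffset;
    }

    public static void main(String[] args) {
        byte[] buffer = "/names\0pdb.analyzer\0".getBytes(StandardCharsets.UTF_8);
        System.out.println(parse(buffer)); // {0=/names, 7=pdb.analyzer}
    }
}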
@ -283,17 +280,17 @@ public class NameTable {
}
builder.append("}\n");
builder.append("------------------------------------------------------------\n");
for (String name : mapNameToStreamNumber.keySet()) {
for (String name : streamNumbersByName.keySet()) {
builder.append(name);
builder.append(" : ");
builder.append(mapNameToStreamNumber.get(name));
builder.append(streamNumbersByName.get(name));
builder.append("\n");
}
builder.append("------------------------------------------------------------\n");
for (int streamNumber : mapStreamNumberToName.keySet()) {
for (int streamNumber : namesByStreamNumber.keySet()) {
builder.append(streamNumber);
builder.append(" : ");
builder.append(mapStreamNumberToName.get(streamNumber));
builder.append(namesByStreamNumber.get(streamNumber));
builder.append("\n");
}
// TODO: output map entries for each table.

View file

@ -24,9 +24,6 @@ import ghidra.pdb.PdbException;
*/
public class Offset16 extends AbstractOffset {
//==============================================================================================
// Abstract Methods
//==============================================================================================
@Override
protected int doParse(PdbByteReader reader) throws PdbException {
return reader.parseUnsignedShortVal();

View file

@ -24,9 +24,6 @@ import ghidra.pdb.PdbException;
*/
public class Offset32 extends AbstractOffset {
//==============================================================================================
// Abstract Methods
//==============================================================================================
@Override
protected int doParse(PdbByteReader reader) throws PdbException {
return reader.parseInt();

View file

@ -16,8 +16,7 @@
package ghidra.pdb.pdbreader;
import java.io.IOException;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.PdbException;
import ghidra.pdb.msfreader.AbstractMsf;
@ -42,9 +41,6 @@ public class PdbParser {
public static final int VC110_ID = 20091201; // 0x01329141
public static final int VC140_ID = 20140508; // 0x013351dc
//==============================================================================================
// API
//==============================================================================================
/**
* Static method to open a PDB file, determine its version, and return an {@link AbstractPdb}
* appropriate for that version; it will not have been deserialized. The main method
@ -59,11 +55,11 @@ public class PdbParser {
* @throws PdbException on parsing issues.
* @throws CancelledException Upon user cancellation.
*/
public static AbstractPdb parse(String filename, PdbReaderOptions pdbOptions, TaskMonitor monitor)
throws IOException, PdbException, CancelledException {
Validate.notNull(filename, "filename cannot be null)");
Validate.notNull(pdbOptions, "pdbOptions cannot be null)");
Validate.notNull(monitor, "monitor cannot be null)");
public static AbstractPdb parse(String filename, PdbReaderOptions pdbOptions,
TaskMonitor monitor) throws IOException, PdbException, CancelledException {
Objects.requireNonNull(filename, "filename cannot be null");
Objects.requireNonNull(pdbOptions, "pdbOptions cannot be null");
Objects.requireNonNull(monitor, "monitor cannot be null");
// Do not do a try with resources here, as the msf must live within the PDB that is
// created below.

View file

@ -54,7 +54,7 @@ public class PdbReaderOptions extends Exception {
}
/**
* Returns list of Charsets that encode one byte characters.
* Returns the list of Charset names that encode one-byte characters.
* @return Charsets that encode one byte characters.
*/
public static List<String> getOneByteCharsetNames() {
@ -62,7 +62,7 @@ public class PdbReaderOptions extends Exception {
}
/**
* Returns list of Charsets that encode two byte characters.
* Returns the list of Charset names that encode two-byte characters.
* @return Charsets that encode two byte characters.
*/
public static List<String> getTwoByteCharsetNames() {
@ -133,7 +133,7 @@ public class PdbReaderOptions extends Exception {
* Enable/disable developmental debug.
* @param debug {@code true} to turn debug on; default is {@code false}.
*/
public void setEnabled(boolean debug) {
public void setDebug(boolean debug) {
this.debug = debug;
}
@ -141,7 +141,7 @@ public class PdbReaderOptions extends Exception {
* Returns true if debug is "on."
* @return {@code true} if debug is "on."
*/
public boolean doDebug() {
public boolean isDebug() {
return debug;
}

View file

@ -26,18 +26,11 @@ import ghidra.pdb.PdbException;
*/
public class RvaVaDebugHeader extends DebugHeader {
//==============================================================================================
// Internals
//==============================================================================================
private long relativeVirtualAddressDataBase;
private BigInteger virtualAddressImageBase;
private long unsignedIntReserved1;
private long unsignedIntReserved2;
//==============================================================================================
// API
//==============================================================================================
/**
* Returns the relative virtual address data base.
* @return the relative virtual address data base.

View file

@ -27,9 +27,6 @@ import ghidra.pdb.PdbException;
*/
public class SegmentMapDescription {
//==============================================================================================
// Internals
//==============================================================================================
private int flags;
private int ovl;
private int group;
@ -39,10 +36,6 @@ public class SegmentMapDescription {
private long segOffset;
private long segLength;
//==============================================================================================
// API
//==============================================================================================
/**
* Returns the segment offset.
* @return The offset of the segment.
@ -75,9 +68,6 @@ public class SegmentMapDescription {
segLength = substreamReader.parseUnsignedIntVal();
}
//==============================================================================================
// Package-Protected Internals
//==============================================================================================
/**
* Dumps the {@link SegmentMapDescription}. This method is for debugging only.
* @return {@link String} of pretty output.

View file

@ -21,6 +21,7 @@ import ghidra.pdb.PdbException;
/**
* This class represents a Symbol Hash Record used by Global Symbol Information and Public
* Symbol Information.
* @see GlobalSymbolInformation
*/
public class SymbolHashRecord implements Comparable<SymbolHashRecord> {
@ -28,6 +29,7 @@ public class SymbolHashRecord implements Comparable<SymbolHashRecord> {
private int referenceCount;
/**
* Parses the contents of this record.
* @param reader {@link PdbByteReader} from which to deserialize the data.
* @throws PdbException Upon not enough data left to parse.
*/

View file

@ -17,8 +17,6 @@ package ghidra.pdb.pdbreader;
import java.util.*;
import org.apache.commons.lang3.Validate;
import ghidra.pdb.*;
import ghidra.pdb.pdbreader.symbol.*;
import ghidra.util.exception.CancelledException;
@ -255,7 +253,7 @@ public class SymbolParser {
* @param pdb {@link AbstractPdb} that owns the Symbols to be parsed.
*/
public SymbolParser(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
}
@ -266,10 +264,8 @@ public class SymbolParser {
public String getNewSymbolTypesLog() {
StringBuilder builder = new StringBuilder();
DelimiterState ds = new DelimiterState("New Symbol IDs Seen: ", ",");
/**
* We are creating the sorted set now, as we are willing to incur the cost of a sorted
* set now, but do not want to incur too much debug cost for adding to the
* {@link newSymbolTypesSeen} when not doing debug.
/*
* Sort these before printing to avoid sorting performance hit when logging is not used.
*/
Set<Integer> sortedSet = new TreeSet<>(newSymbolTypesSeen);
for (Integer val : sortedSet) {
@ -299,6 +295,7 @@ public class SymbolParser {
/**
* Deserializes an {@link AbstractMsSymbol} from the {@link PdbByteReader} and returns it.
* @param symbolTypeId the PDB ID for the symbol type to be parsed.
* @param reader {@link PdbByteReader} from which to deserialize the symbol record.
* @return {@link AbstractMsSymbol} that was parsed.
* @throws PdbException upon error parsing a field.
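
The rewritten comment above captures the trade-off: symbol type IDs are collected in an unsorted set during parsing and copied into a TreeSet only when the log text is produced, so normal runs never pay for sorted insertion. A brief sketch of that pattern with made-up IDs.

import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

// Sketch: collect into a cheap HashSet during parsing, sort only when the log is produced.
public class SortAtDumpTimeSketch {
    private final Set<Integer> newSymbolTypesSeen = new HashSet<>();

    void record(int symbolTypeId) {
        newSymbolTypesSeen.add(symbolTypeId); // O(1) per parse, no ordering maintained
    }

    String dump() {
        Set<Integer> sorted = new TreeSet<>(newSymbolTypesSeen); // sort once, only here
        return "New Symbol IDs Seen: " + sorted;
    }

    public static void main(String[] args) {
        SortAtDumpTimeSketch log = new SortAtDumpTimeSketch();
        log.record(0x1168);
        log.record(0x110e);
        log.record(0x1136);
        System.out.println(log.dump()); // New Symbol IDs Seen: [4366, 4406, 4456]
    }
}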

View file

@ -19,8 +19,6 @@ import java.io.IOException;
import java.io.Writer;
import java.util.*;
import org.apache.commons.lang3.Validate;
import ghidra.pdb.PdbByteReader;
import ghidra.pdb.PdbException;
import ghidra.pdb.pdbreader.symbol.AbstractMsSymbol;
@ -36,24 +34,18 @@ import ghidra.util.task.TaskMonitor;
*/
public class SymbolRecords {
//==============================================================================================
// Internals
//==============================================================================================
private AbstractPdb pdb;
private Map<Long, AbstractMsSymbol> symbolMap;
private List<Map<Long, AbstractMsSymbol>> moduleSymbols = new ArrayList<>();
private Map<Long, AbstractMsSymbol> symbolsByOffset;
private List<Map<Long, AbstractMsSymbol>> moduleSymbolsByOffset = new ArrayList<>();
private int comprehensiveSymbolCount = 0;
private List<AbstractMsSymbol> comprehensiveSymbolList = new ArrayList<>();
//==============================================================================================
// API
//==============================================================================================
/**
* Constructor.
* @param pdb {@link AbstractPdb} to which the {@link SymbolRecords} belong.
*/
public SymbolRecords(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
}
@ -85,26 +77,23 @@ public class SymbolRecords {
/**
* Returns the list of symbols.
* @return {@link Map}<{@link Long},{@link AbstractMsSymbol}> of buffer offsets to
* @return {@link Map}&lt;{@link Long},{@link AbstractMsSymbol}&gt; of buffer offsets to
* symbols.
*/
protected Map<Long, AbstractMsSymbol> getSymbolMap() {
return symbolMap;
protected Map<Long, AbstractMsSymbol> getSymbolsByOffset() {
return symbolsByOffset;
}
/**
* Returns the buffer-offset-to-symbol map for the module as specified by moduleNumber.
* @param moduleNumber The number ID of the module for which to return the list.
* @return {@link Map}<{@link Long},{@link AbstractMsSymbol}> of buffer offsets to
* @return {@link Map}&lt;{@link Long},{@link AbstractMsSymbol}&gt; of buffer offsets to
* symbols for the specified module.
*/
protected Map<Long, AbstractMsSymbol> getModuleSymbolMap(int moduleNumber) {
return moduleSymbols.get(moduleNumber);
protected Map<Long, AbstractMsSymbol> getModuleSymbolsByOffset(int moduleNumber) {
return moduleSymbolsByOffset.get(moduleNumber);
}
//==============================================================================================
// Package-Protected Internals
//==============================================================================================
/**
* Deserializes the {@link SymbolRecords} from the stream noted in the DBI header.
* @param monitor {@link TaskMonitor} used for checking cancellation.
@ -117,11 +106,11 @@ public class SymbolRecords {
int streamNumber;
PdbByteReader reader;
streamNumber = pdb.databaseInterface.getSymbolRecordsStreamNumber();
streamNumber = pdb.getDatabaseInterface().getSymbolRecordsStreamNumber();
reader = pdb.getReaderForStreamNumber(streamNumber, monitor);
symbolMap = deserializeSymbolRecords(reader, monitor);
symbolsByOffset = deserializeSymbolRecords(reader, monitor);
for (AbstractModuleInformation module : pdb.databaseInterface.moduleInformationList) {
for (AbstractModuleInformation module : pdb.getDatabaseInterface().moduleInformationList) {
streamNumber = module.getStreamNumberDebugInformation();
if (streamNumber != 0xffff) {
// System.out.println("\n\nStreamNumber: " + streamNumber);
@ -130,9 +119,9 @@ public class SymbolRecords {
int sizeDebug = module.getSizeLocalSymbolsDebugInformation();
sizeDebug -= x; //TODO: seems right, but need to evaluate this
PdbByteReader debugReader = reader.getSubPdbByteReader(sizeDebug);
Map<Long, AbstractMsSymbol> moduleSymbolsMap =
Map<Long, AbstractMsSymbol> oneModuleSymbolsByOffset =
deserializeSymbolRecords(debugReader, monitor);
moduleSymbols.add(moduleSymbolsMap);
moduleSymbolsByOffset.add(oneModuleSymbolsByOffset);
// PdbByteReader rest = reader.getSubPdbByteReader(reader.numRemaining());
// System.out.println(rest.dump());
@ -143,7 +132,7 @@ public class SymbolRecords {
// TODO: figure out the rest of the bytes in the stream (index of reader)
}
else {
moduleSymbols.add(null);
moduleSymbolsByOffset.add(null);
}
}
@ -151,12 +140,11 @@ public class SymbolRecords {
/**
* Deserializes the {@link AbstractMsSymbol} symbols from the {@link PdbByteReader} and
* returns a {@link Map}<{@link Long},{@link AbstractMsSymbol}> of buffer offsets to
* returns a {@link Map}&lt;{@link Long},{@link AbstractMsSymbol}&gt; of buffer offsets to
* symbols.
* @param reader {@link PdbByteReader} containing the symbol records to deserialize.
* @param monitor {@link TaskMonitor} used for checking cancellation.
// * @return {@link AbstractMsSymbol} symbols.
* @return map of buffer offsets to {@link AbstractMsSymbol} symbols.
* @return map of buffer offsets to {@link AbstractMsSymbol symbols}.
* @throws PdbException Upon not enough data left to parse.
* @throws CancelledException Upon user cancellation.
*/
@ -164,7 +152,7 @@ public class SymbolRecords {
TaskMonitor monitor) throws PdbException, CancelledException {
//System.out.println(reader.dump(0x400));
SymbolParser parser = pdb.getSymbolParser();
Map<Long, AbstractMsSymbol> mySymbolMap = new TreeMap<>();
Map<Long, AbstractMsSymbol> mySymbolsByOffset = new TreeMap<>();
while (reader.hasMore()) {
monitor.checkCanceled();
@ -196,11 +184,11 @@ public class SymbolRecords {
new CategoryIndex(CategoryIndex.Category.SYMBOL, comprehensiveSymbolCount));
AbstractMsSymbol symbol = parser.parse(recordReader);
pdb.popDependencyStack();
mySymbolMap.put((long) offset, symbol);
mySymbolsByOffset.put((long) offset, symbol);
comprehensiveSymbolList.add(symbol);
comprehensiveSymbolCount++;
}
return mySymbolMap;
return mySymbolsByOffset;
}
/**
@ -210,9 +198,9 @@ public class SymbolRecords {
*/
protected void dump(Writer writer) throws IOException {
writer.write("SymbolRecords-----------------------------------------------\n");
writer.write(dumpSymbolMap(symbolMap));
for (int i = 0; i < moduleSymbols.size(); i++) {
Map<Long, AbstractMsSymbol> map = moduleSymbols.get(i);
writer.write(dumpSymbolMap(symbolsByOffset));
for (int i = 0; i < moduleSymbolsByOffset.size(); i++) {
Map<Long, AbstractMsSymbol> map = moduleSymbolsByOffset.get(i);
if (map != null) {
writer.write("Module(" + i + ") List:\n");
writer.write(dumpSymbolMap(map));
@ -221,22 +209,18 @@ public class SymbolRecords {
writer.write("\nEnd SymbolRecords-------------------------------------------\n");
}
//==============================================================================================
// Internal Data Methods
//==============================================================================================
/**
* Debug method for dumping the symbols from a symbol map
* @param map the {@link Map}<{@link Long},{@link AbstractMsSymbol}> to dump.
* @param mySymbolsByOffset the {@link Map}&lt;{@link Long},{@link AbstractMsSymbol}&gt; to dump.
* @return {@link String} of pretty output of symbols dumped.
*/
protected String dumpSymbolMap(Map<Long, AbstractMsSymbol> map) {
protected String dumpSymbolMap(Map<Long, AbstractMsSymbol> mySymbolsByOffset) {
StringBuilder builder = new StringBuilder();
builder.append("SymbolMap---------------------------------------------------");
for (Long offset : map.keySet()) {
AbstractMsSymbol symbol = map.get(offset);
for (Map.Entry<Long, AbstractMsSymbol> entry : mySymbolsByOffset.entrySet()) {
builder.append("\n------------------------------------------------------------\n");
builder.append(String.format("Offset: 0X%08X\n", offset));
builder.append(symbol);
builder.append(String.format("Offset: 0X%08X\n", entry.getKey()));
builder.append(entry.getValue());
}
builder.append("\nEnd SymbolMap-----------------------------------------------\n");
return builder.toString();
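
dumpSymbolMap now iterates entrySet() instead of walking keySet() and calling get() for every key, which avoids a second lookup per entry. A minimal illustration with made-up offsets and symbol strings.

import java.util.Map;
import java.util.TreeMap;

// Sketch: iterating Map.entrySet() instead of keySet() plus get(), as in dumpSymbolMap.
public class EntrySetIterationSketch {
    public static void main(String[] args) {
        Map<Long, String> symbolsByOffset = new TreeMap<>();
        symbolsByOffset.put(0x10L, "LPROC32 helper");
        symbolsByOffset.put(0x40L, "GPROC32 main");

        // One traversal, no second lookup per key.
        for (Map.Entry<Long, String> entry : symbolsByOffset.entrySet()) {
            System.out.printf("Offset: 0X%08X  %s%n", entry.getKey(), entry.getValue());
        }
    }
}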

View file

@ -24,9 +24,6 @@ import ghidra.pdb.PdbException;
*/
public class TypeIndex16 extends AbstractTypeIndex {
//==============================================================================================
// Abstract Methods
//==============================================================================================
@Override
protected int doParse(PdbByteReader reader) throws PdbException {
return reader.parseUnsignedShortVal();

View file

@ -28,9 +28,6 @@ import ghidra.pdb.PdbException;
*/
public class TypeIndex32 extends AbstractTypeIndex {
//==============================================================================================
// Abstract Methods
//==============================================================================================
@Override
protected int doParse(PdbByteReader reader) throws PdbException {
return reader.parseInt();

View file

@ -17,8 +17,6 @@ package ghidra.pdb.pdbreader;
import java.util.*;
import org.apache.commons.lang3.Validate;
import ghidra.pdb.*;
import ghidra.pdb.pdbreader.type.*;
import ghidra.util.exception.CancelledException;
@ -193,7 +191,7 @@ public class TypeParser {
* @param pdb {@link AbstractPdb} that owns the Symbols to be parsed.
*/
public TypeParser(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
}
@ -204,10 +202,8 @@ public class TypeParser {
public String getNewDataTypesLog() {
StringBuilder builder = new StringBuilder();
DelimiterState ds = new DelimiterState("New Symbol IDs Seen: ", ",");
/**
* We are creating the sorted set now, as we are willing to incur the cost of a sorted
* set now, but do not want to incur too much debug cost for adding to the
* {@link newDataTypesSeen} when not doing debug.
/*
* Sort these before printing to avoid sorting performance hit when logging is not used.
*/
Set<Integer> sortedSet = new TreeSet<>(newDataTypesSeen);
for (Integer val : sortedSet) {
@ -237,6 +233,7 @@ public class TypeParser {
/**
* Deserializes an {@link AbstractMsType} from the {@link PdbByteReader} and returns it.
* @param dataTypeId the PDB ID for the symbol type to be parsed.
* @param reader {@link PdbByteReader} from which to deserialize the data.
* @return {@link AbstractMsType} parsed.
* @throws PdbException upon error parsing a field.

View file

@ -230,7 +230,7 @@ public abstract class AbstractCompile2MsSymbol extends AbstractMsSymbol {
}
/**
* Returns the {@link List}<{@link AbstractString}> of additional strings.
* Returns the {@link List}&lt;{@link AbstractString}&gt; of additional strings.
* @return List of additional strings.
*/
public List<AbstractString> getStringList() {

View file

@ -29,7 +29,6 @@ import ghidra.pdb.pdbreader.*;
public abstract class AbstractDataHighLevelShaderLanguageSymbolInternals
extends AbstractSymbolInternals {
//==============================================================================================
/**
* Implementing class is required to parse these four fields in the
* {@link #parse(PdbByteReader)} method.
@ -39,7 +38,6 @@ public abstract class AbstractDataHighLevelShaderLanguageSymbolInternals
protected HLSLRegisterType registerType;
protected AbstractString name;
//==============================================================================================
/**
* Constructor for this symbol internals.
* @param pdb {@link AbstractPdb} to which this symbol belongs.

View file

@ -15,7 +15,7 @@
*/
package ghidra.pdb.pdbreader.symbol;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.AbstractParsableItem;
import ghidra.pdb.PdbByteReader;
@ -84,7 +84,7 @@ public abstract class AbstractMsSymbol extends AbstractParsableItem {
* @param reader {@link PdbByteReader} from which this symbol is deserialized.
*/
AbstractMsSymbol(AbstractPdb pdb, PdbByteReader reader) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
// The following commented-out code is used when trying to discern which SYMBOL types were
// not yet implemented.

View file

@ -15,7 +15,7 @@
*/
package ghidra.pdb.pdbreader.symbol;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.*;
import ghidra.pdb.pdbreader.AbstractPdb;
@ -40,7 +40,7 @@ public abstract class AbstractSymbolInternals extends AbstractParsableItem {
* @param pdb {@link AbstractPdb} to which this symbol belongs.
*/
public AbstractSymbolInternals(AbstractPdb pdb) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
create();
}

View file

@ -78,8 +78,8 @@ public class EnvironmentBlockMsSymbol extends AbstractMsSymbol {
}
/**
* Note: this method returns {@link List}<{@link AbstractString}> instead of
* {@link List}<{@link String}>
* Note: this method returns {@link List}&lt;{@link AbstractString}&gt; instead of
* {@link List}&lt;{@link String}&gt;
* @return {@link AbstractString} list.
*/
public List<AbstractString> getStringList() {

View file

@ -90,7 +90,7 @@ public class InlinedFunctionCallsiteMsSymbol extends AbstractMsSymbol {
}
/**
* Returns {@link List}<{@link InstructionAnnotation}>.
* Returns {@link List}&lt;{@link InstructionAnnotation}&gt;.
* @return Instruction annotations.
*/
public List<InstructionAnnotation> getBinaryAnnotationOpcodeList() {

View file

@ -58,7 +58,7 @@ public class UnknownX1168MsSymbol extends AbstractUnknownMsSymbol {
}
/**
* Returns {@link List}<{@link Integer}> of what appears to be type indices.
* Returns {@link List}&lt;{@link Integer}&gt; of what appears to be type indices.
* @return Possible type indices.
*/
public List<Integer> getTypeIndexList() {

View file

@ -46,7 +46,7 @@ public abstract class AbstractArgumentsListMsType extends AbstractMsType {
}
/**
* Returns {@link List}<{@link AbstractTypeIndex}> of indices in the argument list.
* Returns {@link List}&lt;{@link AbstractTypeIndex}&gt; of indices in the argument list.
* @return Indices of arguments in the arguments list.
*/
public List<AbstractTypeIndex> getArgTypeIndexList() {

View file

@ -62,7 +62,7 @@ public abstract class AbstractFieldListMsType extends AbstractMsType {
}
/**
* Returns the (ordered?) {@link List}<{@link AbstractMsType}> of types in the namespace.
* Returns the (ordered?) {@link List}&lt;{@link AbstractMsType}&gt; of types in the namespace.
* @return List of types in the namespace.
*/
public List<AbstractMsType> getNamespaceList() {
@ -70,7 +70,7 @@ public abstract class AbstractFieldListMsType extends AbstractMsType {
}
/**
* Returns the (ordered?) {@link List}<{@link AbstractMsType}> of type members types of
* Returns the (ordered?) {@link List}&lt;{@link AbstractMsType}&gt; of type members types of
* this field list.
* @return Field list.
*/
@ -79,7 +79,7 @@ public abstract class AbstractFieldListMsType extends AbstractMsType {
}
/**
* Returns the (ordered?) {@link List}<{@link AbstractMsType}> of other types. (We have
* Returns the (ordered?) {@link List}&lt;{@link AbstractMsType}&gt; of other types. (We have
* separated these out, but are unsure about what they are at this time.)
* @return List of other types.
*/

View file

@ -79,7 +79,7 @@ public abstract class AbstractMemberFunctionMsType extends AbstractMsType {
* Returns the {@link CallingConvention}.
* @return the {@link CallingConvention}.
*/
public CallingConvention getCallingConventionValue() {
public CallingConvention getCallingConvention() {
return callingConvention;
}

View file

@ -15,7 +15,7 @@
*/
package ghidra.pdb.pdbreader.type;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.*;
import ghidra.pdb.pdbreader.*;
@ -40,7 +40,7 @@ public abstract class AbstractMethodRecordMs extends AbstractParsableItem {
* @throws PdbException Upon not enough data left to parse.
*/
public AbstractMethodRecordMs(AbstractPdb pdb, PdbByteReader reader) throws PdbException {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
procedureRecordNumber = create();
parseFields(reader);

View file

@ -15,7 +15,7 @@
*/
package ghidra.pdb.pdbreader.type;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
import ghidra.pdb.AbstractParsableItem;
import ghidra.pdb.PdbByteReader;
@ -75,7 +75,7 @@ public abstract class AbstractMsType extends AbstractParsableItem {
* @param reader {@link PdbByteReader} from which this type is deserialized.
*/
AbstractMsType(AbstractPdb pdb, PdbByteReader reader) {
Validate.notNull(pdb, "pdb cannot be null)");
Objects.requireNonNull(pdb, "pdb cannot be null");
this.pdb = pdb;
//System.out.println(reader.dump());
}

View file

@ -68,7 +68,7 @@ public abstract class AbstractProcedureMsType extends AbstractMsType {
* Returns the {@link CallingConvention}.
* @return the {@link CallingConvention}.
*/
public CallingConvention getCallingConventionValue() {
public CallingConvention getCallingConvention() {
return callingConvention;
}

View file

@ -47,11 +47,9 @@ public class BuildInfoMsType extends AbstractMsType {
BUILDINFO_STRING[4] = "CommandArguments: ";
}
//==============================================================================================
private int count;
private List<Integer> argsCodeItemId = new ArrayList<>();
//==============================================================================================
/**
* Constructor for this type.
* @param pdb {@link AbstractPdb} to which this type belongs.

View file

@ -27,12 +27,10 @@ public class FunctionMsAttributes extends AbstractParsableItem {
private static final String INSTANCE_CONSTRUCTOR_VIRTUAL_BASE_STRING =
"instance constructor of a class with virtual base";
//==============================================================================================
private boolean hasCPPStyleReturnUDT;
private boolean isInstanceConstructor;
private boolean isInstanceConstructorOfClassWithVirtualBases;
//==============================================================================================
/**
* Constructor for FunctionMsAttributes.
* @param reader {@link PdbByteReader} from which this type is deserialized.

View file

@ -32,10 +32,8 @@ public class LabelMsType extends AbstractMsType {
private static final int ADDRESS_MODE_NEAR = 0;
private static final int ADDRESS_MODE_FAR = 4;
//==============================================================================================
private int mode;
//==============================================================================================
/**
* Constructor for this type.
* @param pdb {@link AbstractPdb} to which this type belongs.

View file

@ -32,7 +32,7 @@ public class PdbByteWriterReaderTest extends AbstractGenericTest {
// Tests
//==============================================================================================
@Test
public void testWriter() throws PdbException {
public void testWriter() throws Exception {
PdbByteWriter writer = new PdbByteWriter();
byte[] byteArrayTest1 = new byte[3];