GT-3571 Numerous changes to datatype resolve/replace addressing

performance and conflict handling.  Corrected composite merge
deficiencies.  Added datatype resolve/equivalence caches for performance
improvement.  Added deferred pointer resolution for structures and
unions.  Corrected datatype parent/child update
This commit is contained in:
ghidra1 2020-02-28 18:45:31 -05:00
parent d5cc72fd14
commit c2d9629f57
113 changed files with 3515 additions and 1853 deletions
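The "datatype resolve/equivalence caches" mentioned in the message can be pictured with a minimal sketch; the class and method names below are illustrative only and are not the code introduced by this commit. It memoizes DataType.isEquivalent() results per pair of instances, keyed by packing the two identity hash codes into a long, the same pair-key idea used by the new DWARFDataTypeConflictHandler further down in this diff.

import java.util.HashMap;
import java.util.Map;

import ghidra.program.model.data.DataType;

// Illustrative sketch only; hypothetical class, not part of this commit.
class EquivalenceCache {

    private final Map<Long, Boolean> cache = new HashMap<>();

    // Memoize isEquivalent() so repeated resolve/replace passes over the same
    // pair of data types do not repeat the (potentially deep) comparison.
    boolean isEquivalent(DataType a, DataType b) {
        long key = pairKey(a, b);
        Boolean cached = cache.get(key);
        if (cached == null) {
            cached = a.isEquivalent(b);
            cache.put(key, cached);
        }
        return cached;
    }

    private static long pairKey(DataType a, DataType b) {
        return ((long) System.identityHashCode(a) << 32) |
            (System.identityHashCode(b) & 0xffffffffL);
    }
}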

View File

@ -32,8 +32,7 @@ import ghidra.framework.plugintool.ModalPluginTool;
import ghidra.framework.plugintool.PluginTool;
import ghidra.framework.plugintool.util.PluginException;
import ghidra.program.model.listing.DomainObjectChangeSet;
import ghidra.util.HelpLocation;
import ghidra.util.Msg;
import ghidra.util.*;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.*;
@ -460,6 +459,16 @@ public abstract class MergeManager implements DomainObjectMergeManager {
}
}
/**
* Display error message dialog in a blocking fashion.
* @param originator message originator
* @param title dialog title
* @param msg dialog message
*/
public static void displayErrorAndWait(Object originator, String title, String msg) {
Swing.runNow(() -> Msg.showError(originator, null, title, msg));
}
/**
* Block until the user completes the current merge operation, or
* cancels the merge process.
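For context, a minimal usage sketch of the new displayErrorAndWait helper, assuming a caller running on a background merge thread; the wrapper class here is hypothetical, while the call itself mirrors the DataTypeMergeManager call site later in this diff.

import ghidra.app.merge.MergeManager;

// Hypothetical wrapper for illustration only.
class MergeErrorReporter {

    // Posts the error dialog to the Swing thread and blocks the calling
    // (background) merge thread until the user dismisses it.
    static void reportUpdateFailure(Object originator, String typeName, String detail) {
        MergeManager.displayErrorAndWait(originator, typeName + " Update Failed", detail);
    }
}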

View File

@ -22,9 +22,7 @@ import javax.swing.SwingUtilities;
import org.apache.commons.lang3.StringUtils;
import ghidra.app.merge.MergeConstants;
import ghidra.app.merge.MergeResolver;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.merge.*;
import ghidra.app.util.HelpTopics;
import ghidra.framework.data.DomainObjectMergeManager;
import ghidra.program.database.data.DataTypeManagerDB;
@ -753,7 +751,7 @@ public class DataTypeMergeManager implements MergeResolver {
/**
* Set category path. If name conflict occurs within new category
* the specified dt will remain within its' current category
* the specified dt will remain within its current category
* @param dt datatype whose category is to be changed
* @param newPath new category path
*/
@ -860,8 +858,8 @@ public class DataTypeMergeManager implements MergeResolver {
if (!myDtAddedList.contains(Long.valueOf(dataTypeID))) {
existingDt = dtms[RESULT].getDataType(dataTypeID);
if (existingDt != null) {
Msg.warn(this, " ** WARNING ** : Unexpectedly found data type \"" +
existingDt.getPathName() + "\" when trying to add it.");
Msg.warn(this, "Unexpectedly found data type \"" + existingDt.getPathName() +
"\" when trying to add it.");
return existingDt;
}
}
@ -923,6 +921,9 @@ public class DataTypeMergeManager implements MergeResolver {
fixUpList.add(new FixUpInfo(id, baseID, -1, resolvedDataTypes));
}
}
else {
resolvedDataTypes.put(baseID, resolvedDt);
}
}
else {
// Added in My, but hasn't processed yet, so fixup later.
@ -1176,6 +1177,10 @@ public class DataTypeMergeManager implements MergeResolver {
if (comps.length != 0) {
lastOffset = comps[comps.length - 1].getOffset();
}
// Track dependency errors to avoid duplicate popups
HashMap<Long, String> badIdDtMsgs = new HashMap<>();
for (DataTypeComponent sourceComp : comps) {
DataType sourceCompDt = sourceComp.getDataType();
BitFieldDataType bfDt = null;
@ -1231,6 +1236,9 @@ public class DataTypeMergeManager implements MergeResolver {
try {
if (resultCompDt != null) {
long dtId = dtms[RESULT].getID(resultCompDt);
String badMsg = badIdDtMsgs.get(Long.valueOf(dtId));
int length = resultCompDt.getLength();
if (length <= 0) {
length = sourceComp.getLength();
@ -1242,7 +1250,7 @@ public class DataTypeMergeManager implements MergeResolver {
destStruct.addBitField(resultCompDt, bfDt.getDeclaredBitSize(),
sourceComp.getFieldName(), comment);
}
else {
else if (badMsg == null) {
try {
// If I have compDt, it should now be from result DTM.
destStruct.add(resultCompDt, length, sourceComp.getFieldName(),
@ -1250,14 +1258,17 @@ public class DataTypeMergeManager implements MergeResolver {
}
catch (IllegalArgumentException e) {
displayError(destStruct, e);
DataType badDt = BadDataType.dataType;
comment = "Couldn't add " + resultCompDt.getDisplayName() +
" here. " + e.getMessage() + " " +
((comment != null) ? (" " + comment) : "");
destStruct.add(badDt, sourceComp.getLength(),
sourceComp.getFieldName(), comment);
badMsg = "Couldn't add " + resultCompDt.getDisplayName() +
" here. " + e.getMessage();
if (e.getCause() instanceof DataTypeDependencyException) {
badIdDtMsgs.put(dtId, badMsg);
}
}
}
if (badMsg != null) {
destStruct.add(BadDataType.dataType, sourceComp.getLength(),
sourceComp.getFieldName(), badMsg + " " + comment);
}
}
else if (bfDt != null) {
destStruct.insertBitFieldAt(sourceComp.getOffset(), sourceComp.getLength(),
@ -1265,33 +1276,40 @@ public class DataTypeMergeManager implements MergeResolver {
sourceComp.getFieldName(), comment);
}
else {
try {
// If I have compDt, it should now be from result DTM.
// If not last component must constrain length to original component size
int offset = sourceComp.getOffset();
if (offset < lastOffset && length > sourceComp.getLength()) {
// The data type is too big, so adjust the component length to what will fit.
int extraBytesNeeded = length - sourceComp.getLength();
length = sourceComp.getLength();
// Output a warning indicating the structure has a data type that doesn't fit.
String message =
"Structure Merge: Not enough undefined bytes to fit " +
resultCompDt.getPathName() + " in structure " +
destStruct.getPathName() + " at offset 0x" +
Integer.toHexString(offset) + "." + "\nIt needs " +
extraBytesNeeded + " more byte(s) to be able to fit.";
Msg.warn(this, message);
if (badMsg == null) {
try {
// If I have compDt, it should now be from result DTM.
// If not last component must constrain length to original component size
int offset = sourceComp.getOffset();
if (offset < lastOffset && length > sourceComp.getLength()) {
// The data type is too big, so adjust the component length to what will fit.
int extraBytesNeeded = length - sourceComp.getLength();
length = sourceComp.getLength();
// Output a warning indicating the structure has a data type that doesn't fit.
String message =
"Structure Merge: Not enough undefined bytes to fit " +
resultCompDt.getPathName() + " in structure " +
destStruct.getPathName() + " at offset 0x" +
Integer.toHexString(offset) + "." + "\nIt needs " +
extraBytesNeeded + " more byte(s) to be able to fit.";
Msg.warn(this, message);
}
destStruct.insertAtOffset(sourceComp.getOffset(), resultCompDt,
length, sourceComp.getFieldName(), comment);
}
catch (IllegalArgumentException e) {
displayError(destStruct, e);
badMsg = "Couldn't add " + resultCompDt.getDisplayName() +
" here. " + e.getMessage();
if (e.getCause() instanceof DataTypeDependencyException) {
badIdDtMsgs.put(dtId, badMsg);
}
}
destStruct.insertAtOffset(sourceComp.getOffset(), resultCompDt, length,
sourceComp.getFieldName(), comment);
}
catch (IllegalArgumentException e) {
displayError(destStruct, e);
DataType badDt = BadDataType.dataType;
comment = "Couldn't add " + resultCompDt.getDisplayName() + " here. " +
e.getMessage() + " " + ((comment != null) ? (" " + comment) : "");
destStruct.insertAtOffset(sourceComp.getOffset(), badDt,
sourceComp.getLength(), sourceComp.getFieldName(), comment);
if (badMsg != null) {
destStruct.insertAtOffset(sourceComp.getOffset(), BadDataType.dataType,
sourceComp.getLength(), sourceComp.getFieldName(),
badMsg + " " + comment);
}
}
}
@ -1352,7 +1370,7 @@ public class DataTypeMergeManager implements MergeResolver {
String msg = "Some of your changes to " + destComposite.getName() +
" cannot be merged.\nProblem: " + e.getMessage();
String typeName = (destComposite instanceof Union) ? "Union" : "Structure";
Msg.showError(this, null, typeName + " Update Failed", msg);
MergeManager.displayErrorAndWait(this, typeName + " Update Failed", msg);
}
private void updateUnion(long sourceDtID, Union sourceDt, Union destUnion,
@ -1836,6 +1854,13 @@ public class DataTypeMergeManager implements MergeResolver {
return false;
}
private int getNumDefinedComponents(Composite c) {
if (c instanceof Structure) {
return ((Structure) c).getNumDefinedComponents();
}
return c.getNumComponents();
}
private boolean compositeDataTypeWasChanged(Composite c1, Composite c2) {
DataTypeManager dtm1 = c1.getDataTypeManager();
DataTypeManager dtm2 = c2.getDataTypeManager();
@ -1843,34 +1868,65 @@ public class DataTypeMergeManager implements MergeResolver {
c1.isDefaultAligned() != c2.isDefaultAligned() ||
c1.isMachineAligned() != c2.isMachineAligned() ||
c1.getMinimumAlignment() != c2.getMinimumAlignment() ||
c1.getPackingValue() != c2.getPackingValue() ||
(!c1.isInternallyAligned() && (c1.getLength() != c2.getLength()))) {
c1.getPackingValue() != c2.getPackingValue()) {
return true;
}
if (c1.getNumComponents() != c2.getNumComponents()) {
int c1ComponentCnt = getNumDefinedComponents(c1);
int c2ComponentCnt = getNumDefinedComponents(c2);
if (c1ComponentCnt != c2ComponentCnt) {
return true;
}
int nComponents = c1.getNumComponents();
for (int i = 0; i < nComponents; i++) {
DataTypeComponent dtc1 = c1.getComponent(i);
DataTypeComponent dtc2 = c2.getComponent(i);
if (dtm1.getID(dtc1.getDataType()) != dtm2.getID(dtc2.getDataType())) {
boolean checkOffsets = false;
if (c1 instanceof Structure) {
if (!((Structure) c1).isInternallyAligned()) {
if (c1.getNumComponents() != c2.getNumComponents()) {
return true;
}
checkOffsets = true;
}
DataTypeComponent flexDtc1 = ((Structure) c1).getFlexibleArrayComponent();
DataTypeComponent flexDtc2 = ((Structure) c2).getFlexibleArrayComponent();
if (flexDtc1 != null && flexDtc2 != null) {
if (isChangedComponent(flexDtc1, flexDtc2, dtm1, dtm2, false)) {
return true;
}
}
else if (flexDtc1 != null || flexDtc2 != null) {
return true;
}
String fname1 = dtc1.getFieldName();
String fname2 = dtc2.getFieldName();
String comment1 = dtc1.getComment();
String comment2 = dtc2.getComment();
if (fname1 != null && !fname1.equals(fname2) ||
fname2 != null && !fname2.equals(fname1) ||
comment1 != null && !comment1.equals(comment2) ||
comment2 != null && !comment2.equals(comment1)) {
}
DataTypeComponent[] c1Components = c1.getDefinedComponents();
DataTypeComponent[] c2Components = c2.getDefinedComponents();
for (int i = 0; i < c1ComponentCnt; i++) {
DataTypeComponent dtc1 = c1Components[i];
DataTypeComponent dtc2 = c2Components[i];
if (isChangedComponent(dtc1, dtc2, dtm1, dtm2, checkOffsets)) {
return true;
}
}
return false;
}
private boolean isChangedComponent(DataTypeComponent dtc1, DataTypeComponent dtc2,
DataTypeManager dtm1, DataTypeManager dtm2, boolean checkOffsets) {
if (checkOffsets && dtc1.getOffset() != dtc2.getOffset()) {
return true;
}
if (dtm1.getID(dtc1.getDataType()) != dtm2.getID(dtc2.getDataType())) {
return true;
}
if (!Objects.equals(dtc1.getFieldName(), dtc2.getFieldName()) ||
!Objects.equals(dtc1.getComment(), dtc2.getComment())) {
return true;
}
return false;
}
private boolean dataTypeSourceWasChanged(long id, DataTypeManager dtm) {
return dataTypeSourceWasChanged(id, dtms[ORIGINAL], dtm);
}
@ -1899,7 +1955,7 @@ public class DataTypeMergeManager implements MergeResolver {
"\n Source Archive = " + sourceArchive2.getName() + " ";
Msg.error(this, msg);
}
if (!SystemUtilities.isEqual(universalID1, universalID2)) {
if (!Objects.equals(universalID1, universalID2)) {
return true;
}
}
@ -2226,10 +2282,9 @@ public class DataTypeMergeManager implements MergeResolver {
UniversalID resultDtUniversalID = resultDt.getUniversalID();
UniversalID myDtUniversalID = myDt.getUniversalID();
// UniversalID can be null if data type is BuiltIn.
if (!resultSourceArchive.getSourceArchiveID()
.equals(
mySourceArchive.getSourceArchiveID()) ||
!SystemUtilities.isEqual(resultDtUniversalID, myDtUniversalID)) {
if (!resultSourceArchive.getSourceArchiveID().equals(
mySourceArchive.getSourceArchiveID()) ||
!Objects.equals(resultDtUniversalID, myDtUniversalID)) {
return false;
}
if (resultDt.isEquivalent(myDt)) {
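A recurring change in this commit is switching callers from Composite.getComponents() to getDefinedComponents() so that undefined filler bytes in unaligned structures are skipped. A small sketch of that pattern follows; the helper class is illustrative only.

import java.util.Objects;

import ghidra.program.model.data.DataTypeComponent;
import ghidra.program.model.data.Structure;

// Illustrative helper, not part of this commit.
class DefinedComponentWalker {

    // Iterates only the explicitly defined components; getComponents() would
    // also return one component per undefined filler byte.
    static void printDefinedFields(Structure struct) {
        for (DataTypeComponent dtc : struct.getDefinedComponents()) {
            String fieldName = Objects.toString(dtc.getFieldName(), "<unnamed>");
            System.out.println("0x" + Integer.toHexString(dtc.getOffset()) + "  " +
                dtc.getDataType().getName() + "  " + fieldName);
        }
    }
}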

View File

@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -16,13 +15,6 @@
*/
package ghidra.app.merge.datatypes;
import ghidra.app.merge.MergeConstants;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.*;
import ghidra.program.model.data.Enum;
import ghidra.program.model.listing.FunctionSignature;
import ghidra.util.UniversalID;
import java.awt.BorderLayout;
import java.awt.Color;
import java.util.Arrays;
@ -31,6 +23,13 @@ import javax.swing.JPanel;
import javax.swing.JTextPane;
import javax.swing.text.*;
import ghidra.app.merge.MergeConstants;
import ghidra.program.model.data.*;
import ghidra.program.model.data.Enum;
import ghidra.program.model.listing.FunctionSignature;
import ghidra.util.StringUtilities;
import ghidra.util.UniversalID;
/**
* Panel to show the contents of a Data Type.
*
@ -39,56 +38,59 @@ import javax.swing.text.*;
class DataTypePanel extends JPanel {
private static final long serialVersionUID = 1L;
public Color SOURCE_COLOR = new Color(0, 140, 0);
public Color SOURCE_COLOR = new Color(0, 140, 0);
private DataType dataType;
private JTextPane textPane;
private StyledDocument doc;
private SimpleAttributeSet pathAttrSet;
private SimpleAttributeSet nameAttrSet;
private SimpleAttributeSet sourceAttrSet;
private SimpleAttributeSet offsetAttrSet;
private SimpleAttributeSet contentAttrSet;
private SimpleAttributeSet fieldNameAttrSet;
private SimpleAttributeSet commentAttrSet;
private SimpleAttributeSet deletedAttrSet;
DataTypePanel(DataType dataType) {
super(new BorderLayout());
this.dataType = dataType;
create();
}
void setDataType(DataType dataType) {
this.dataType = dataType;
textPane.setText("");
if (dataType instanceof Composite) {
formatCompositeText((Composite)dataType);
formatCompositeText((Composite) dataType);
}
else if (dataType instanceof Enum) {
formatEnumText((Enum)dataType);
formatEnumText((Enum) dataType);
}
else if (dataType instanceof TypeDef) {
formatTypeDefText((TypeDef)dataType);
formatTypeDefText((TypeDef) dataType);
}
else if (dataType instanceof FunctionDefinition) {
formatFunctionDef((FunctionDefinition)dataType);
formatFunctionDef((FunctionDefinition) dataType);
}
else {
formatDataType(dataType);
formatDataType(dataType);
}
textPane.setCaretPosition(0);
}
private void create() {
textPane = new JTextPane();
textPane = new JTextPane();
doc = textPane.getStyledDocument();
add(textPane, BorderLayout.CENTER);
textPane.setEditable(false);
pathAttrSet = new SimpleAttributeSet();
pathAttrSet.addAttribute(StyleConstants.FontFamily, "Tahoma");
pathAttrSet.addAttribute(StyleConstants.FontSize, new Integer(11));
pathAttrSet.addAttribute(StyleConstants.Bold, Boolean.TRUE);
pathAttrSet.addAttribute(StyleConstants.Foreground, MergeConstants.CONFLICT_COLOR);
nameAttrSet = new SimpleAttributeSet();
nameAttrSet.addAttribute(StyleConstants.FontFamily, "Tahoma");
nameAttrSet.addAttribute(StyleConstants.FontSize, new Integer(11));
@ -100,11 +102,16 @@ class DataTypePanel extends JPanel {
sourceAttrSet.addAttribute(StyleConstants.Bold, Boolean.TRUE);
sourceAttrSet.addAttribute(StyleConstants.Foreground, SOURCE_COLOR);
offsetAttrSet = new SimpleAttributeSet();
offsetAttrSet.addAttribute(StyleConstants.FontFamily, "Monospaced");
offsetAttrSet.addAttribute(StyleConstants.FontSize, new Integer(12));
offsetAttrSet.addAttribute(StyleConstants.Foreground, Color.BLACK);
contentAttrSet = new SimpleAttributeSet();
contentAttrSet.addAttribute(StyleConstants.FontFamily, "Monospaced");
contentAttrSet.addAttribute(StyleConstants.FontSize, new Integer(12));
contentAttrSet.addAttribute(StyleConstants.Foreground, Color.BLUE);
fieldNameAttrSet = new SimpleAttributeSet();
fieldNameAttrSet.addAttribute(StyleConstants.FontFamily, "Monospaced");
fieldNameAttrSet.addAttribute(StyleConstants.FontSize, new Integer(12));
@ -120,18 +127,18 @@ class DataTypePanel extends JPanel {
deletedAttrSet.addAttribute(StyleConstants.FontSize, new Integer(12));
deletedAttrSet.addAttribute(StyleConstants.Bold, Boolean.TRUE);
deletedAttrSet.addAttribute(StyleConstants.Foreground, Color.RED);
setDataType(dataType);
}
private void formatPath(DataType dt) {
insertString("Path: " + dt.getCategoryPath()+ "\n\n", pathAttrSet);
insertString("Path: " + dt.getCategoryPath() + "\n\n", pathAttrSet);
}
private void formatSourceArchive(DataType dt) {
insertString("Source Archive: " + getSourceArchiveName(dt) + "\n", sourceAttrSet);
}
private String getSourceArchiveName(DataType dt) {
SourceArchive sourceArchive = dt.getSourceArchive();
UniversalID sourceID = (sourceArchive != null) ? sourceArchive.getSourceArchiveID() : null;
@ -140,96 +147,141 @@ class DataTypePanel extends JPanel {
}
return sourceArchive.getName();
}
private void formatAlignment(Composite composite) {
StringBuffer alignmentBuffer = new StringBuffer();
if (!composite.isInternallyAligned()) {
alignmentBuffer.append( "Unaligned" );
}
else if (composite.isDefaultAligned()) {
alignmentBuffer.append( "Aligned" );
}
else if (composite.isMachineAligned()) {
alignmentBuffer.append( "Machine aligned");
}
else {
long alignment = composite.getMinimumAlignment();
alignmentBuffer.append( "align(" + alignment + ")" );
}
if (composite.isInternallyAligned()) {
long packingValue = composite.getPackingValue();
if (packingValue != Composite.NOT_PACKING) {
alignmentBuffer.append( " pack(" + packingValue + ")" );
}
}
if (!composite.isInternallyAligned()) {
alignmentBuffer.append("Unaligned");
}
else if (composite.isDefaultAligned()) {
alignmentBuffer.append("Aligned");
}
else if (composite.isMachineAligned()) {
alignmentBuffer.append("Machine aligned");
}
else {
long alignment = composite.getMinimumAlignment();
alignmentBuffer.append("align(" + alignment + ")");
}
if (composite.isInternallyAligned()) {
long packingValue = composite.getPackingValue();
if (packingValue != Composite.NOT_PACKING) {
alignmentBuffer.append(" pack(" + packingValue + ")");
}
}
insertString(alignmentBuffer.toString() + "\n\n", sourceAttrSet);
}
// private void formatAlignmentValue(Composite composite) {
// StringBuffer alignmentBuffer = new StringBuffer();
// alignmentBuffer.append( "Alignment: " );
//
// DataTypeManager dataTypeManager = composite.getDataTypeManager();
// DataOrganization dataOrganization = null;
// if (dataTypeManager != null) {
// dataOrganization = dataTypeManager.getDataOrganization();
// }
// if (dataOrganization == null) {
// dataOrganization = DataOrganization.getDefaultOrganization();
// }
// int alignment = dataOrganization.getAlignment(composite, composite.getLength());
// alignmentBuffer.append( "" + alignment );
//
// insertString("\n" + alignmentBuffer.toString() + "\n", sourceAttrSet);
// }
private void insertAlignment(Composite composite) {
StringBuffer alignmentBuffer = new StringBuffer();
alignmentBuffer.append("Alignment: ");
alignmentBuffer.append(Integer.toString(composite.getAlignment()));
insertString(alignmentBuffer.toString() + "\n", sourceAttrSet);
}
private void insertLength(Composite composite) {
StringBuffer lengthBuffer = new StringBuffer();
lengthBuffer.append("Length: ");
lengthBuffer.append(Integer.toString(composite.getLength()));
insertString(lengthBuffer.toString() + "\n", sourceAttrSet);
}
private int max(String str, int length) {
if (str == null) {
return length;
}
return Math.max(str.length(), length);
}
private String getDataTypeName(DataTypeComponent dtc) {
DataType dt = dtc.getDataType();
StringBuilder buffer = new StringBuilder();
buffer.append(dt.getName());
if (dtc.isFlexibleArrayComponent()) {
buffer.append("[0]");
}
else if (dt instanceof BitFieldDataType &&
!((Composite) dtc.getParent()).isInternallyAligned()) {
BitFieldDataType bfDt = (BitFieldDataType) dt;
buffer.append("(");
buffer.append(Integer.toString(bfDt.getBitOffset()));
buffer.append(")");
}
return buffer.toString();
}
private void renderComponent(DataTypeComponent dtc, int dtNameWidth, int fieldNameWidth,
int offsetWidth) {
String fieldName = dtc.getFieldName();
if (fieldName == null) {
fieldName = "";
}
String comment = dtc.getComment();
if (comment == null) {
comment = "";
}
offsetWidth += 2; // factor in 0x prefix
String offsetStr = "";
if (offsetWidth > 0) {
if (!dtc.isFlexibleArrayComponent()) {
offsetStr = "0x" + Integer.toHexString(dtc.getOffset());
offsetStr = StringUtilities.pad(offsetStr, ' ', offsetWidth - offsetStr.length());
offsetStr += ": ";
}
else {
offsetStr = StringUtilities.pad(offsetStr, ' ', offsetWidth + 2);
}
insertString(" " + offsetStr + " ", offsetAttrSet);
}
fieldName = pad(fieldName, fieldNameWidth);
String typeName = pad(getDataTypeName(dtc), dtNameWidth);
insertString(" " + typeName + " ", contentAttrSet);
insertString(fieldName + " ", fieldNameAttrSet);
insertString(comment, commentAttrSet);
insertString("\n", contentAttrSet);
}
private void formatCompositeText(Composite comp) {
formatSourceArchive(comp);
formatPath(comp);
formatAlignment(comp);
insertString(comp.getDisplayName(), nameAttrSet);
insertString(" { \n", contentAttrSet);
DataTypeComponent[] components = comp.getComponents();
int maxLength=0;
int maxFieldNameLength=0;
for (int i=0; i<components.length; i++) {
String name = components[i].getDataType().getDisplayName();
if (name.length() > maxLength) {
maxLength = name.length();
}
String fieldName = components[i].getFieldName();
if (fieldName == null) {
fieldName = " ";
}
if (fieldName.length() > maxFieldNameLength) {
maxFieldNameLength = fieldName.length();
}
}
for (int i=0; i<components.length; i++) {
String fieldName = components[i].getFieldName();
if (fieldName == null) {
fieldName = "";
}
String comment = components[i].getComment();
if (comment == null) {
comment = "";
}
fieldName = pad(fieldName, maxFieldNameLength);
String typeName = pad(components[i].getDataType().getDisplayName(), maxLength);
insertString(" " + typeName + " ", contentAttrSet);
insertString(fieldName + " ", fieldNameAttrSet);
insertString(comment, commentAttrSet);
if (i < components.length-1) {
insertString("\n", contentAttrSet);
}
}
insertString("\n }\n", contentAttrSet);
// formatAlignmentValue(comp);
}
boolean showComponentOffset = false;
DataTypeComponent[] components = comp.getDefinedComponents();
DataTypeComponent flexDtc = null;
if (comp instanceof Structure) {
showComponentOffset = !comp.isInternallyAligned();
flexDtc = ((Structure) comp).getFlexibleArrayComponent();
}
int offsetLength = showComponentOffset ? Integer.toHexString(comp.getLength()).length() : 0;
int maxDtNameLength = 10;
int maxFieldNameLength = 1;
for (int i = 0; i < components.length; i++) {
maxDtNameLength = max(getDataTypeName(components[i]), maxDtNameLength);
maxFieldNameLength = max(components[i].getFieldName(), maxFieldNameLength);
}
if (flexDtc != null) {
maxDtNameLength = max(getDataTypeName(flexDtc), maxDtNameLength);
maxFieldNameLength = max(flexDtc.getFieldName(), maxFieldNameLength);
}
for (int i = 0; i < components.length; i++) {
renderComponent(components[i], maxDtNameLength, maxFieldNameLength, offsetLength);
}
if (flexDtc != null) {
renderComponent(flexDtc, maxDtNameLength, maxFieldNameLength, offsetLength);
}
insertString("}\n\n", contentAttrSet);
insertAlignment(comp);
insertLength(comp);
}
private void formatEnumText(Enum enuum) {
formatSourceArchive(enuum);
@ -238,104 +290,106 @@ class DataTypePanel extends JPanel {
insertString(" { \n", contentAttrSet);
StringBuffer sb = new StringBuffer();
String[] names = enuum.getNames();
String[] names = enuum.getNames();
int maxLength = 0;
for (int i=0; i<names.length; i++) {
for (int i = 0; i < names.length; i++) {
if (names[i].length() > maxLength) {
maxLength = names[i].length();
}
}
}
long[] values = enuum.getValues();
Arrays.sort(values);
for (int i=0; i<values.length; i++) {
for (int i = 0; i < values.length; i++) {
String name = enuum.getName(values[i]);
name = pad(name, maxLength);
sb.append(" " + name + " = 0x" + Long.toHexString(values[i]) + " ");
if (i < values.length-1) {
sb.append("\n");
}
}
name = pad(name, maxLength);
sb.append(" " + name + " = 0x" + Long.toHexString(values[i]) + " ");
if (i < values.length - 1) {
sb.append("\n");
}
}
sb.append("\n }\n");
insertString(sb.toString(), contentAttrSet);
}
}
private void formatTypeDefText(TypeDef td) {
formatSourceArchive(td);
formatPath(td);
insertString(td.getDisplayName(), nameAttrSet);
insertString("\n", contentAttrSet);
insertString(" TypeDef on " + td.getDataType().getDisplayName(),
contentAttrSet);
insertString(" TypeDef on " + td.getDataType().getDisplayName(), contentAttrSet);
}
private void formatFunctionDef(FunctionDefinition fd) {
formatSourceArchive(fd);
formatPath(fd);
ParameterDefinition[] vars = fd.getArguments();
DataType returnType = fd.getReturnType();
insertString(returnType.getDisplayName(), contentAttrSet);
insertString(" " + fd.getDisplayName(), nameAttrSet);
insertString(" (", contentAttrSet);
boolean hasVarArgs = fd.hasVarArgs();
boolean hasVarArgs = fd.hasVarArgs();
if ((vars.length == 0) && !hasVarArgs) {
insertString(")", contentAttrSet);
return;
}
int maxLength = 0;
for (int i=0; i<vars.length; i++) {
for (int i = 0; i < vars.length; i++) {
String typeName = vars[i].getDataType().getDisplayName();
if (typeName.length() > maxLength) {
maxLength = typeName.length();
}
}
}
StringBuffer sb = new StringBuffer();
for (int i=0; i<vars.length; i++) {
for (int i = 0; i < vars.length; i++) {
sb.append("\n");
String name = vars[i].getDataType().getDisplayName();
name = pad(name, maxLength);
name = pad(name, maxLength);
sb.append(" " + name + " " + vars[i].getName());
if ((i < vars.length-1) || (vars.length > 0 && hasVarArgs)) {
if ((i < vars.length - 1) || (vars.length > 0 && hasVarArgs)) {
sb.append(",");
}
}
if (hasVarArgs) {
if (vars.length > 0) {
sb.append( "\n" ).append( " " );
}
sb.append( FunctionSignature.VAR_ARGS_DISPLAY_STRING );
}
if (hasVarArgs) {
if (vars.length > 0) {
sb.append("\n").append(" ");
}
sb.append(FunctionSignature.VAR_ARGS_DISPLAY_STRING);
}
sb.append(")");
insertString(sb.toString(), contentAttrSet);
}
private void formatDataType(DataType dt) {
if (dt == null) {
insertString("\n\nDeleted", deletedAttrSet);
insertString("\n\nDeleted", deletedAttrSet);
return;
}
formatSourceArchive(dt);
formatPath(dt);
insertString(dt.getDisplayName(), nameAttrSet);
}
private String pad(String str, int length) {
StringBuffer sb = new StringBuffer(str);
int len = length - str.length();
for (int i=0; i<len; i++) {
for (int i = 0; i < len; i++) {
sb.append(" ");
}
return sb.toString();
}
private void insertString(String str, SimpleAttributeSet attrSet) {
int offset = doc.getLength();
try {
doc.insertString(offset, str, attrSet);
} catch (BadLocationException e1) {
}
}
catch (BadLocationException e1) {
}
}
}

View File

@ -28,8 +28,8 @@ import docking.widgets.checkbox.GCheckBox;
import docking.widgets.label.GIconLabel;
import ghidra.app.merge.MergeConstants;
import ghidra.app.merge.util.ConflictCountPanel;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.framework.data.DomainObjectMergeManager;
import ghidra.program.model.data.SourceArchive;
import resources.ResourceManager;
/**

View File

@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -16,8 +15,8 @@
*/
package ghidra.app.merge.datatypes;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.ArchiveType;
import ghidra.program.model.data.SourceArchive;
import java.awt.BorderLayout;
import java.awt.Color;

View File

@ -24,7 +24,6 @@ import javax.swing.table.AbstractTableModel;
import docking.widgets.fieldpanel.support.FieldRange;
import docking.widgets.fieldpanel.support.FieldSelection;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.SettingsImpl;
import ghidra.program.model.data.*;
import ghidra.util.*;
@ -1062,7 +1061,7 @@ class CompositeViewerModel extends AbstractTableModel implements DataTypeManager
* @return true if a sub-component is in the indicated category.
*/
boolean hasSubDtInCategory(Composite parentDt, String catPath) {
DataTypeComponent components[] = parentDt.getComponents();
DataTypeComponent components[] = parentDt.getDefinedComponents();
// FUTURE Add a structure to keep track of which composites were searched so they aren't searched multiple times.
for (DataTypeComponent component : components) {
DataType subDt = component.getDataType();
@ -1087,7 +1086,7 @@ class CompositeViewerModel extends AbstractTableModel implements DataTypeManager
* @return true if the composite data type has the data type as a sub-component.
*/
protected boolean hasSubDt(Composite parentDt, DataTypePath dtPath) {
DataTypeComponent components[] = parentDt.getComponents();
DataTypeComponent components[] = parentDt.getDefinedComponents();
for (DataTypeComponent component : components) {
DataType subDt = component.getDataType();

View File

@ -22,7 +22,6 @@ import javax.swing.JLabel;
import docking.widgets.table.GTableCellRenderer;
import docking.widgets.table.GTableCellRenderingData;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.util.ToolTipUtils;
import ghidra.program.model.data.*;
import ghidra.util.HTMLUtilities;

View File

@ -25,7 +25,6 @@ import org.apache.commons.lang3.StringUtils;
import docking.widgets.label.GDHtmlLabel;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.util.ToolTipUtils;
import ghidra.app.util.html.HTMLDataTypeRepresentation;
import ghidra.app.util.html.MissingArchiveDataTypeHTMLRepresentation;

View File

@ -17,9 +17,9 @@ package ghidra.app.plugin.core.datamgr.actions;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.ArchiveNode;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.HelpLocation;
import java.util.List;

View File

@ -26,11 +26,9 @@ import docking.widgets.tree.GTree;
import docking.widgets.tree.GTreeNode;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.DataTypeNode;
import ghidra.app.plugin.core.datamgr.util.DataTypeUtils;
import ghidra.program.model.data.DataType;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.*;
import ghidra.util.Msg;
import resources.MultiIcon;
import resources.ResourceManager;

View File

@ -26,7 +26,6 @@ import docking.widgets.tree.GTree;
import docking.widgets.tree.GTreeNode;
import ghidra.app.plugin.core.datamgr.DataTypeManagerPlugin;
import ghidra.app.plugin.core.datamgr.DataTypesActionContext;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.DataTypeNode;
import ghidra.framework.plugintool.PluginTool;
import ghidra.program.database.data.ProgramDataTypeManager;

View File

@ -26,10 +26,10 @@ import docking.widgets.OptionDialog;
import docking.widgets.tree.GTreeState;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.ArchiveNode;
import ghidra.app.plugin.core.datamgr.tree.DataTypeArchiveGTree;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.*;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.*;

View File

@ -32,8 +32,7 @@ import ghidra.app.plugin.core.datamgr.archive.*;
import ghidra.app.plugin.core.datamgr.tree.DataTypeArchiveGTree;
import ghidra.app.plugin.core.datamgr.tree.DataTypeNode;
import ghidra.app.plugin.core.datamgr.util.DataTypeUtils;
import ghidra.program.model.data.DataType;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.*;
import ghidra.util.HTMLUtilities;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.*;

View File

@ -92,7 +92,7 @@ public class FindReferencesToFieldAction extends DockingAction {
}
Composite composite = (Composite) dataTypeNode.getDataType();
DataTypeComponent[] components = composite.getComponents();
DataTypeComponent[] components = composite.getDefinedComponents();
List<String> names = new ArrayList<>();
for (DataTypeComponent dataTypeComponent : components) {
if (dataTypeComponent.isBitFieldComponent()) {

View File

@ -18,9 +18,9 @@ package ghidra.app.plugin.core.datamgr.actions;
import ghidra.app.plugin.core.datamgr.DataTypeManagerPlugin;
import ghidra.app.plugin.core.datamgr.DataTypeSyncInfo;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.ArchiveNode;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.HelpLocation;
import java.util.List;

View File

@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -18,11 +17,9 @@ package ghidra.app.plugin.core.datamgr.actions;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.DataTypeNode;
import ghidra.app.plugin.core.datamgr.util.DataTypeUtils;
import ghidra.program.model.data.DataType;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.*;
import ghidra.util.Msg;
import javax.swing.tree.TreePath;

View File

@ -25,10 +25,10 @@ import docking.widgets.OptionDialog;
import docking.widgets.tree.GTreeState;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.ArchiveNode;
import ghidra.app.plugin.core.datamgr.tree.DataTypeArchiveGTree;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.HelpLocation;
import ghidra.util.Msg;
import ghidra.util.exception.CancelledException;

View File

@ -17,10 +17,10 @@ package ghidra.app.plugin.core.datamgr.actions;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.ArchiveNode;
import ghidra.app.plugin.core.datamgr.tree.DataTypeArchiveGTree;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.HelpLocation;
import ghidra.util.Msg;
import docking.ActionContext;

View File

@ -17,9 +17,9 @@ package ghidra.app.plugin.core.datamgr.actions;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.ArchiveNode;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.HelpLocation;
import java.util.List;

View File

@ -26,11 +26,9 @@ import docking.widgets.tree.GTree;
import docking.widgets.tree.GTreeNode;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.tree.DataTypeNode;
import ghidra.app.plugin.core.datamgr.util.DataTypeUtils;
import ghidra.program.model.data.DataType;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.*;
import resources.MultiIcon;
import resources.ResourceManager;
import resources.icons.EmptyIcon;

View File

@ -18,8 +18,8 @@ package ghidra.app.plugin.core.datamgr.actions;
import ghidra.app.plugin.core.datamgr.DataTypeManagerPlugin;
import ghidra.app.plugin.core.datamgr.DataTypesActionContext;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.HelpLocation;
import docking.ActionContext;
import docking.action.DockingAction;

View File

@ -20,8 +20,7 @@ import java.io.IOException;
import javax.swing.ImageIcon;
import ghidra.program.model.data.ArchiveType;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.*;
import ghidra.util.UniversalID;
import ghidra.util.exception.DuplicateFileException;
import resources.ResourceManager;

View File

@ -29,7 +29,6 @@ import docking.widgets.combobox.GhidraComboBox;
import docking.widgets.label.GLabel;
import ghidra.app.plugin.core.compositeeditor.*;
import ghidra.app.plugin.core.datamgr.DataTypeManagerPlugin;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.function.EditFunctionSignatureDialog;
import ghidra.framework.model.DomainObject;
import ghidra.framework.plugintool.PluginTool;

View File

@ -22,7 +22,6 @@ import javax.swing.Icon;
import docking.widgets.tree.GTree;
import docking.widgets.tree.GTreeNode;
import ghidra.app.plugin.core.datamgr.archive.Archive;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.*;
import ghidra.util.task.SwingUpdateManager;

View File

@ -28,7 +28,8 @@ import docking.widgets.tree.*;
import docking.widgets.tree.internal.DefaultGTreeDataTransformer;
import docking.widgets.tree.support.GTreeRenderer;
import ghidra.app.plugin.core.datamgr.*;
import ghidra.app.plugin.core.datamgr.archive.*;
import ghidra.app.plugin.core.datamgr.archive.DataTypeManagerHandler;
import ghidra.app.plugin.core.datamgr.archive.FileArchive;
import ghidra.framework.model.*;
import ghidra.framework.plugintool.PluginTool;
import ghidra.program.model.data.*;
@ -269,7 +270,7 @@ public class DataTypeArchiveGTree extends GTree {
}
private void addCompositeStrings(Composite composite, List<String> results) {
DataTypeComponent[] components = composite.getComponents();
DataTypeComponent[] components = composite.getDefinedComponents();
for (DataTypeComponent component : components) {
String fieldName = component.getFieldName();
if (fieldName != null) {

View File

@ -24,7 +24,6 @@ import org.apache.commons.lang3.StringUtils;
import docking.widgets.tree.GTree;
import docking.widgets.tree.GTreeNode;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.plugin.core.datamgr.util.DataTypeUtils;
import ghidra.app.util.ToolTipUtils;
import ghidra.program.model.data.*;

View File

@ -20,7 +20,6 @@ import java.util.Stack;
import java.util.function.Consumer;
import java.util.function.Predicate;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.app.services.DataTypeReference;
import ghidra.app.services.DataTypeReferenceFinder;
import ghidra.program.model.address.*;
@ -819,7 +818,7 @@ public final class ReferenceUtils {
}
Composite composite = (Composite) baseParent;
DataTypeComponent[] components = composite.getComponents();
DataTypeComponent[] components = composite.getDefinedComponents();
String name = path.pop();
for (DataTypeComponent component : components) {
if (component.getFieldName().equals(name)) {
@ -1021,7 +1020,7 @@ public final class ReferenceUtils {
}
Composite c = (Composite) dt;
DataTypeComponent[] components = c.getComponents();
DataTypeComponent[] components = c.getDefinedComponents();
for (DataTypeComponent component : components) {
if (SystemUtilities.isEqual(component.getFieldName(), fieldName)) {
return component;

View File

@ -243,7 +243,7 @@ public abstract class BiDirectionDataType extends StructureDataType
@Override
public DataTypeComponentImpl insertAtOffset(int offset, DataType dataType, int length,
String newName, String comment) {
String newName, String comment) throws IllegalArgumentException {
if (offset < splitOffset - negativeLength || offset >= splitOffset + positiveLength) {
throw new IllegalArgumentException(
"Offset " + offset + " is not in " + getDisplayName() + ".");
@ -304,7 +304,7 @@ public abstract class BiDirectionDataType extends StructureDataType
@Override
public DataTypeComponent addPositive(DataType dataType, int length, String newName,
String comment) {
String comment) throws IllegalArgumentException {
validateDataType(dataType);
checkAncestry(dataType);
@ -325,7 +325,7 @@ public abstract class BiDirectionDataType extends StructureDataType
@Override
public DataTypeComponent addNegative(DataType dataType, int length, String newName,
String comment) {
String comment) throws IllegalArgumentException {
validateDataType(dataType);
checkAncestry(dataType);
@ -682,7 +682,7 @@ public abstract class BiDirectionDataType extends StructureDataType
@Override
public DataTypeComponent replace(int index, DataType dataType, int length, String newName,
String comment) {
String comment) throws ArrayIndexOutOfBoundsException, IllegalArgumentException {
if (index < 0 || index >= numComponents) {
throw new ArrayIndexOutOfBoundsException(index);
}
@ -695,7 +695,7 @@ public abstract class BiDirectionDataType extends StructureDataType
@Override
public DataTypeComponent replaceAtOffset(int offset, DataType dataType, int length,
String newName, String comment) {
String newName, String comment) throws IllegalArgumentException {
if (offset < splitOffset - negativeLength || offset >= splitOffset + positiveLength) {
throw new IllegalArgumentException(
"Offset " + offset + " is not in " + getDisplayName() + ".");

View File

@ -0,0 +1,366 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.bin.format.dwarf4.next;
import java.util.*;
import ghidra.program.model.data.*;
import static ghidra.program.model.data.DataTypeConflictHandler.ConflictResult.*;
/**
* This {@link DataTypeConflictHandler conflict handler} attempts to match
* conflicting {@link Composite composite data types} (structure or union) when
* they have compatible data layouts. (Data types that are exactly equiv will
* not be subjected to conflict handling and will never reach here)
* <p>
* A default/empty sized structure, or structures with the same size are
* candidates for matching.
* <p>
* Structures that have a subset of the other's field definition are candidates
* for matching.
* <p>
* When a candidate data type is matched with an existing data type, this
* conflict handler will specify that the new data type is:
* <p>
* <ul>
* <li>discarded and replaced by the existing data type
* ({@link ConflictResult#USE_EXISTING})
* <li>used to overwrite the existing data type
* ({@link ConflictResult#REPLACE_EXISTING})
* </ul>
* or the candidate data type was <b>NOT</b> matched with an existing data type,
* and the new data type is:
* <p>
* <ul>
* <li>kept, but renamed with a .conflictNNNN suffix to make it unique
* ({@link ConflictResult#RENAME_AND_ADD})
* </ul>
* <b>NOTE:</b> structures with alignment (instead of being statically laid out)
* are not treated specially and will not match other aligned or non-aligned
* structures.
*
*/
class DWARFDataTypeConflictHandler extends DataTypeConflictHandler {
static final DWARFDataTypeConflictHandler INSTANCE = new DWARFDataTypeConflictHandler();
private DWARFDataTypeConflictHandler() {
// do not create instances of this class
}
/**
* Returns true if src can overwrite the target composite based on size
*
* @param src the candidate composite being added
* @param target the existing composite it may overwrite
* @return true if target is not yet defined or both composites have the same length
*/
private boolean isSizeCompatible(Composite src, Composite target) {
return target.isNotYetDefined() || (src.getLength() == target.getLength());
}
/**
* Determines if the given composite is either empty or filled with default
* values (no defined components).
*
* @param composite composite to check
* @return true if empty or default and false otherwise
*/
private boolean isCompositeDefault(Composite composite) {
return composite.isNotYetDefined()
|| ((composite instanceof Structure) && ((Structure) composite).getNumDefinedComponents() == 0);
}
private boolean isCompositePart(Composite full, Composite part, Set<Long> visitedDataTypes) {
if (full instanceof Structure && part instanceof Structure) {
return isStructurePart((Structure) full, (Structure) part, visitedDataTypes);
} else if (full instanceof Union && part instanceof Union) {
return isUnionPart((Union) full, (Union) part, visitedDataTypes);
} else {
return false;
}
}
/**
* Returns true if one union is a subset of another union.
* <p>
* Each component of the candidate partial union must be present in the 'full'
* union and must be 'equiv'.
* <p>
* Order of components is ignored, except for unnamed components, which receive
* a default name created using their ordinal position.
*
* @param full {@link Union} datatype that is expected to be a
* superset of the next param.
* @param part {@link Union} datatype that is expected to be a
* subset of the previous param.
* @param visitedDataTypes identity map of datatypes to prevent loops.
* @return true if part is a subset (or equal) to full.
*/
private boolean isUnionPart(Union full, Union part, Set<Long> visitedDataTypes) {
if (full.getLength() < part.getLength()) {
return false;
}
Map<String, DataTypeComponent> fullComponentsByName = new HashMap<>();
for (DataTypeComponent dtc : full.getComponents()) {
String name = dtc.getFieldName();
if (name == null) {
name = dtc.getDefaultFieldName();
}
fullComponentsByName.put(name, dtc);
}
for (DataTypeComponent dtc : part.getComponents()) {
String name = dtc.getFieldName();
if (name == null) {
name = dtc.getDefaultFieldName();
}
DataTypeComponent fullDTC = fullComponentsByName.get(name);
if (fullDTC == null) {
return false;
}
DataType partDT = dtc.getDataType();
DataType fullDT = fullDTC.getDataType();
if (doRelaxedCompare(partDT, fullDT, visitedDataTypes) == RENAME_AND_ADD) {
return false;
}
}
return true;
}
/*
* Returns true if one structure is a partial definition of another structure.
* <p> Each defined component in the candidate partial structure must be present
* in the 'full' structure and must be equiv. <p> The order and sparseness of
* the candidate partial structure is not important, only that all of its
* defined components are present in the full structure. <p>
*/
private boolean isStructurePart(Structure full, Structure part, Set<Long> visitedDataTypes) {
// Both structures should be equal in length
if (full.getLength() != part.getLength()) {
return false;
}
DataTypeComponent[] partComps = part.getDefinedComponents();
// Find a match in the full structure's component list for each
// component in the partial structure.
// Use resolveConflict() == USE_EXISTING to test for equiv in addition to
// isEquiv().
// Ensure that two components in the partial struct don't map to the same
// component in the full structure.
for (DataTypeComponent partDTC : partComps) {
DataTypeComponent fullDTCAt = (partDTC.getDataType() instanceof BitFieldDataType)
? getBitfieldByOffsets(full, partDTC)
: full.getComponentAt(partDTC.getOffset());
if (fullDTCAt == null || fullDTCAt.getOffset() != partDTC.getOffset()) {
return false;
}
DataType partDT = partDTC.getDataType();
DataType fullDT = fullDTCAt.getDataType();
if (doRelaxedCompare(partDT, fullDT, visitedDataTypes) == RENAME_AND_ADD) {
return false;
}
}
if ( part.getFlexibleArrayComponent() != null ) {
return full.getFlexibleArrayComponent() != null
&& doRelaxedCompare(part.getFlexibleArrayComponent().getDataType(),
full.getFlexibleArrayComponent().getDataType(), visitedDataTypes) != RENAME_AND_ADD;
}
return true;
}
private DataTypeComponent getBitfieldByOffsets(Structure full, DataTypeComponent partDTC) {
DataTypeComponent fullDTC = full.getComponentAt(partDTC.getOffset());
if (fullDTC == null || fullDTC.getOffset() != partDTC.getOffset()) {
return null;
}
BitFieldDataType partBF = (BitFieldDataType) partDTC.getDataType();
int fullNumComp = full.getNumComponents();
for(int fullOrdinal = fullDTC.getOrdinal(); fullOrdinal < fullNumComp; fullOrdinal++) {
fullDTC = full.getComponent(fullOrdinal);
if (fullDTC.getOffset() != partDTC.getOffset()
|| !(fullDTC.getDataType() instanceof BitFieldDataType)) {
return null;
}
BitFieldDataType fullBF = (BitFieldDataType) fullDTC.getDataType();
if ( fullBF.getBitOffset() == partBF.getBitOffset() ) {
return fullDTC;
}
}
return null;
}
/*
* Strict compare will compare its parameters. The contents of these datatypes
* (ie. contents of structs, pointers, arrays) will be compared with relaxed
* typedef checking.
*/
private ConflictResult doStrictCompare(DataType addedDataType, DataType existingDataType,
Set<Long> visitedDataTypes) {
if (!addVisited(existingDataType, addedDataType, visitedDataTypes)) {
return USE_EXISTING;
}
if (existingDataType instanceof Composite && addedDataType instanceof Composite) {
Composite existingComposite = (Composite) existingDataType;
Composite addedComposite = (Composite) addedDataType;
// Check to see if we are adding a default/empty data type
if ((isCompositeDefault(addedComposite)) && isSizeCompatible(existingComposite, addedComposite)) {
return USE_EXISTING;
}
// Check to see if the existing type is a default/empty data type
if ((isCompositeDefault(existingComposite)) && isSizeCompatible(addedComposite, existingComposite)) {
return REPLACE_EXISTING;
}
// Check to see if the added type is part of the existing type first to
// generate more USE_EXISTINGS when possible.
if (isCompositePart(existingComposite, addedComposite, visitedDataTypes)) {
return USE_EXISTING;
}
// Check to see if the existing type is a part of the added type
if (isCompositePart(addedComposite, existingComposite, visitedDataTypes)) {
return REPLACE_EXISTING;
}
return RENAME_AND_ADD;
}
if (existingDataType instanceof TypeDef && addedDataType instanceof TypeDef) {
TypeDef addedTypeDef = (TypeDef) addedDataType;
TypeDef existingTypeDef = (TypeDef) existingDataType;
return doRelaxedCompare(addedTypeDef.getBaseDataType(), existingTypeDef.getBaseDataType(),
visitedDataTypes);
}
if (existingDataType instanceof Array && addedDataType instanceof Array) {
Array addedArray = (Array) addedDataType;
Array existingArray = (Array) existingDataType;
if (addedArray.getNumElements() != existingArray.getNumElements()
|| addedArray.getElementLength() != existingArray.getElementLength()) {
return RENAME_AND_ADD;
}
return doRelaxedCompare(addedArray.getDataType(), existingArray.getDataType(), visitedDataTypes);
}
if (existingDataType instanceof Pointer && addedDataType instanceof Pointer) {
return doRelaxedCompare(((Pointer) addedDataType).getDataType(), ((Pointer) existingDataType).getDataType(),
visitedDataTypes);
}
if (existingDataType instanceof FunctionDefinition && addedDataType instanceof FunctionDefinition) {
return compareFuncDef((FunctionDefinition) addedDataType, (FunctionDefinition) existingDataType,
visitedDataTypes);
}
if (existingDataType instanceof BitFieldDataType && addedDataType instanceof BitFieldDataType) {
BitFieldDataType existingBF = (BitFieldDataType) existingDataType;
BitFieldDataType addedBF = (BitFieldDataType) addedDataType;
if (existingBF.getDeclaredBitSize() != addedBF.getDeclaredBitSize()) {
return RENAME_AND_ADD;
}
return existingBF.getPrimitiveBaseDataType().isEquivalent(addedBF.getPrimitiveBaseDataType()) ? USE_EXISTING
: RENAME_AND_ADD;
}
if (existingDataType.isEquivalent(addedDataType)) {
return USE_EXISTING;
}
return RENAME_AND_ADD;
}
private ConflictResult compareFuncDef(FunctionDefinition addedFunc, FunctionDefinition existingFunc,
Set<Long> visitedDataTypes) {
if (doRelaxedCompare(addedFunc.getReturnType(), existingFunc.getReturnType(),
visitedDataTypes) == RENAME_AND_ADD) {
return RENAME_AND_ADD;
}
ParameterDefinition[] addedArgs = addedFunc.getArguments();
ParameterDefinition[] existingArgs = existingFunc.getArguments();
if (addedArgs.length != existingArgs.length) {
return RENAME_AND_ADD;
}
for (int i = 0; i < addedArgs.length; i++) {
ParameterDefinition addedParam = addedArgs[i];
ParameterDefinition existingParam = existingArgs[i];
if (doRelaxedCompare(addedParam.getDataType(), existingParam.getDataType(),
visitedDataTypes) == RENAME_AND_ADD) {
return RENAME_AND_ADD;
}
}
return USE_EXISTING;
}
/*
* Relaxed compare will take liberties in skipping typedefs to try to compare
* the types that the typedefs are hiding. This is useful when comparing types
* that were embedded in differently compiled files, where you might end up with
* a raw basetype in one file and a typedef to a basetype in another file.
*/
private ConflictResult doRelaxedCompare(DataType addedDataType, DataType existingDataType,
Set<Long> visitedDataTypes) {
// unwrap typedefs, possibly asymmetrically. (ie. only unwrap added vs.
// existing)
if (addedDataType instanceof TypeDef) {
return doRelaxedCompare(((TypeDef) addedDataType).getBaseDataType(), existingDataType, visitedDataTypes);
}
if (existingDataType instanceof TypeDef) {
return doRelaxedCompare(addedDataType, ((TypeDef) existingDataType).getBaseDataType(), visitedDataTypes);
}
return doStrictCompare(addedDataType, existingDataType, visitedDataTypes);
}
private long getDTPairKey(DataType dataType1, DataType dataType2) {
return ((long) System.identityHashCode(dataType1) << 32)
+ ((long) System.identityHashCode(dataType2) & 0xffffffffL);
}
private boolean addVisited(DataType dataType1, DataType dataType2, Set<Long> visitedDataTypes) {
long key = getDTPairKey(dataType1, dataType2);
return visitedDataTypes.add(key);
}
@Override
public ConflictResult resolveConflict(DataType addedDataType, DataType existingDataType) {
Set<Long> visitedDataTypes = new HashSet<>();
return doStrictCompare(addedDataType, existingDataType, visitedDataTypes);
}
@Override
public boolean shouldUpdate(DataType sourceDataType, DataType localDataType) {
return false;
}
@Override
public DataTypeConflictHandler getSubsequentHandler() {
return this;
}
}
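How the handler is applied is sketched below, under the assumption that the caller lives in the same package (the handler class is package-private); the later DWARFDataTypeImporter and DWARFDataTypeManager hunks in this diff show the actual call sites.

import ghidra.program.model.data.DataType;
import ghidra.program.model.data.DataTypeManager;

// Illustrative example; the real usage appears in DWARFDataTypeImporter below.
class DwarfResolveExample {

    // Layout-compatible composites collapse onto existing definitions instead
    // of accumulating .conflict copies.
    static DataType commit(DataTypeManager dtm, DataType dwarfType) {
        return dtm.resolve(dwarfType, DWARFDataTypeConflictHandler.INSTANCE);
    }
}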

View File

@ -408,8 +408,8 @@ public class DWARFDataTypeImporter {
}
}
DataType result = dataTypeManager.addDataType(enumDT,
DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER);
DataType result =
dataTypeManager.addDataType(enumDT, DWARFDataTypeConflictHandler.INSTANCE);
return new DWARFDataType(result, dni, diea.getOffset());
}
@ -1339,7 +1339,7 @@ public class DWARFDataTypeImporter {
@Override
public String toString() {
return dataType.toString() + "|" + (dni != null ? dni.toString() : "na") + "|" +
return dataType.getName() + " | " + (dni != null ? dni.toString() : "na") + " | " +
hexOffsets();
}

View File

@ -16,38 +16,14 @@
package ghidra.app.util.bin.format.dwarf4.next;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.*;
import ghidra.app.util.bin.format.dwarf4.DIEAggregate;
import ghidra.app.util.bin.format.dwarf4.DWARFException;
import ghidra.app.util.bin.format.dwarf4.DWARFUtil;
import ghidra.app.util.bin.format.dwarf4.DebugInfoEntry;
import ghidra.app.util.bin.format.dwarf4.*;
import ghidra.app.util.bin.format.dwarf4.encoding.DWARFEncoding;
import ghidra.app.util.bin.format.dwarf4.encoding.DWARFTag;
import ghidra.app.util.bin.format.dwarf4.expression.DWARFExpressionException;
import ghidra.app.util.bin.format.dwarf4.next.DWARFDataTypeImporter.DWARFDataType;
import ghidra.program.model.data.AbstractIntegerDataType;
import ghidra.program.model.data.ArrayDataType;
import ghidra.program.model.data.Category;
import ghidra.program.model.data.CategoryPath;
import ghidra.program.model.data.DataType;
import ghidra.program.model.data.DataTypeConflictHandler;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.DataTypePath;
import ghidra.program.model.data.FunctionDefinition;
import ghidra.program.model.data.FunctionDefinitionDataType;
import ghidra.program.model.data.GenericCallingConvention;
import ghidra.program.model.data.ParameterDefinition;
import ghidra.program.model.data.ParameterDefinitionImpl;
import ghidra.program.model.data.Pointer;
import ghidra.program.model.data.PointerDataType;
import ghidra.program.model.data.TypedefDataType;
import ghidra.program.model.data.WideChar16DataType;
import ghidra.program.model.data.WideChar32DataType;
import ghidra.program.model.data.*;
import ghidra.program.model.listing.Program;
import ghidra.util.Msg;
import ghidra.util.SystemUtilities;
@ -155,8 +131,8 @@ public class DWARFDataTypeManager {
}
// Commit the DataType to the database
DataType post = dataTypeManager.resolve(pre.dataType,
DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER);
DataType post =
dataTypeManager.resolve(pre.dataType, DWARFDataTypeConflictHandler.INSTANCE);
// While walking the pre and post DataType graph in lockstep, use the mapping of
// pre_impl->offset to cache offset->post_datatype for later re-use.
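// Illustrative sketch of the offset caching described above (map and variable names are
// hypothetical, not taken from this file): once the impl type for a given DWARF offset has
// been resolved into the database, remember the resolved type so later references to the
// same offset reuse it instead of resolving again.
//
//   Map<Long, DataType> offsetToResolved = new HashMap<>();
//   offsetToResolved.put(dieOffset, post);               // record the resolved DB type
//   DataType cached = offsetToResolved.get(dieOffset);   // later lookups reuse it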

View File

@ -116,7 +116,7 @@ public class DWARFParser {
CategoryPath origCompositeNSCP =
new CategoryPath(origCategoryPath, compositeDataType.getName());
CategoryPath destCompositeNSCP = new CategoryPath(newCP, compositeDataType.getName());
for (DataTypeComponent component : compositeDataType.getComponents()) {
for (DataTypeComponent component : compositeDataType.getDefinedComponents()) {
DataType dtcDT = component.getDataType();
if (dtcDT instanceof Array || dtcDT instanceof Pointer) {
dtcDT = DataTypeUtils.getNamedBaseDataType(dtcDT);

View File

@ -187,11 +187,10 @@ public class DWARFProgram implements Closeable {
this.nameLengthCutoffSize = Math.max(MIN_NAME_LENGTH_CUTOFF,
Math.min(importOptions.getNameLengthCutoff(), MAX_NAME_LENGTH_CUTOFF));
monitor.setMessage("Reading DWARF debug string table");
this.debugStrings = StringTable.readStringTable(
sectionProvider.getSectionAsByteProvider(DWARFSectionNames.DEBUG_STR));
Msg.info(this, "Read DWARF debug string table, " + debugStrings.getByteCount() + " bytes.");
// Msg.info(this, "Read DWARF debug string table, " + debugStrings.getByteCount() + " bytes.");
this.attributeFactory = new DWARFAttributeFactory(this);

View File

@ -15,7 +15,7 @@
*/
package ghidra.app.util.html;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.HTMLUtilities;
public class MissingArchiveDataTypeHTMLRepresentation extends HTMLDataTypeRepresentation {

View File

@ -626,7 +626,7 @@ public class DataTypesXmlMgr {
attrs.addAttribute("SIZE", struct.isNotYetDefined() ? 0 : struct.getLength(), true);
writer.startElement("STRUCTURE", attrs);
writeRegularComment(writer, struct.getDescription());
DataTypeComponent[] members = struct.getComponents();
DataTypeComponent[] members = struct.getDefinedComponents();
for (DataTypeComponent member : members) {
writerMember(writer, member);
}

View File

@ -266,7 +266,7 @@ public abstract class PCodeTestAbstractControlBlock {
}
protected int getStructureComponent(Structure testInfoStruct, String fieldName) {
for (DataTypeComponent component : testInfoStruct.getComponents()) {
for (DataTypeComponent component : testInfoStruct.getDefinedComponents()) {
if (fieldName.equals(component.getFieldName())) {
return component.getOffset();
}

View File

@ -20,7 +20,6 @@ import static org.junit.Assert.*;
import org.junit.Assert;
import org.junit.Test;
import docking.widgets.OptionDialog;
import ghidra.program.database.*;
import ghidra.program.model.data.*;
import ghidra.program.model.data.Enum;
@ -297,11 +296,11 @@ public class DataTypeMerge3Test extends AbstractDataTypeMergeTest {
}
});
setErrorsExpected(true);
executeMerge();
executeMerge(true);
close(waitForWindow("Structure Update Failed")); // expected dependency error on Foo
setErrorsExpected(false);
waitForCompletion();
DataTypeManager dtm = resultProgram.getDataTypeManager();
@ -380,12 +379,11 @@ public class DataTypeMerge3Test extends AbstractDataTypeMergeTest {
chooseOption(DataTypeMergeManager.OPTION_LATEST);// LATEST CoolUnion
setErrorsExpected(true);
chooseOption(DataTypeMergeManager.OPTION_MY);// MY Foo
waitForCompletion();
setErrorsExpected(false);
close(waitForWindow("Structure Update Failed")); // expected dependency error on Foo
waitForCompletion();
checkConflictCount(0);
@ -534,21 +532,11 @@ public class DataTypeMerge3Test extends AbstractDataTypeMergeTest {
chooseOption(DataTypeMergeManager.OPTION_LATEST);// Latest CoolUnion
setErrorsExpected(true);
chooseOption(DataTypeMergeManager.OPTION_MY);// My Bar
//
// This last choice shows an error dialog
//
OptionDialog errorDialog =
waitForDialogComponent(null, OptionDialog.class, DEFAULT_WINDOW_TIMEOUT);
close(waitForWindow("Structure Update Failed")); // expected dependency error on Bar
setErrorsExpected(false);
assertNotNull(errorDialog);
errorDialog.close();
window.setVisible(false);
waitForCompletion();
checkConflictCount(0);
@ -1094,6 +1082,172 @@ public class DataTypeMerge3Test extends AbstractDataTypeMergeTest {
checkConflictCount(1);
}
@Test
public void testConflictUpdate7() throws Exception {
TypeDef td = new TypedefDataType(new CategoryPath("/Category1/Category2"), "TD",
IntegerDataType.dataType);
mtf.initialize("notepad2", new ProgramModifierListener() {
@Override
public void modifyLatest(ProgramDB program) {
DataTypeManager dtm = program.getDataTypeManager();
int transactionID = program.startTransaction("test");
try {
Structure s1 =
(Structure) dtm.getDataType(new CategoryPath("/Category1/Category2"),
"Structure_1");
s1.setFlexibleArrayComponent(td, null, null);
}
finally {
program.endTransaction(transactionID, true);
}
}
@Override
public void modifyPrivate(ProgramDB program) {
DataTypeManager dtm = program.getDataTypeManager();
int transactionID = program.startTransaction("test");
try {
Structure s1 =
(Structure) dtm.getDataType(new CategoryPath("/Category1/Category2"),
"Structure_1");
s1.setFlexibleArrayComponent(IntegerDataType.dataType, "flex1", "cmt1");
}
finally {
program.endTransaction(transactionID, true);
}
}
});
executeMerge();
chooseOption(DataTypeMergeManager.OPTION_MY);// MY Structure_1
waitForCompletion();
checkConflictCount(0);
DataTypeManager dtm = resultProgram.getDataTypeManager();
Structure s1 =
(Structure) dtm.getDataType(new CategoryPath("/Category1/Category2"), "Structure_1");
assertNotNull(s1);
DataTypeComponent[] dtcs = s1.getComponents();
assertEquals(4, dtcs.length);
DataTypeComponent flexDtc = s1.getFlexibleArrayComponent();
assertNotNull(flexDtc);
assertTrue(IntegerDataType.class == flexDtc.getDataType().getClass());
assertEquals("flex1", flexDtc.getFieldName());
assertEquals("cmt1", flexDtc.getComment());
}
@Test
public void testConflictUpdate8() throws Exception {
TypeDef td = new TypedefDataType(new CategoryPath("/Category1/Category2"), "TD",
IntegerDataType.dataType);
mtf.initialize("notepad2", new OriginalProgramModifierListener() {
@Override
public void modifyOriginal(ProgramDB program) throws Exception {
DataTypeManager dtm = program.getDataTypeManager();
int transactionID = program.startTransaction("test");
try {
Structure s1 =
(Structure) dtm.getDataType(new CategoryPath("/Category1/Category2"),
"Structure_1");
s1.setFlexibleArrayComponent(IntegerDataType.dataType, null, null);
}
finally {
program.endTransaction(transactionID, true);
}
}
@Override
public void modifyLatest(ProgramDB program) {
DataTypeManager dtm = program.getDataTypeManager();
int transactionID = program.startTransaction("test");
try {
Structure s1 =
(Structure) dtm.getDataType(new CategoryPath("/Category1/Category2"),
"Structure_1");
s1.setFlexibleArrayComponent(td, "flex1", "cmt1");
}
finally {
program.endTransaction(transactionID, true);
}
}
@Override
public void modifyPrivate(ProgramDB program) {
DataTypeManager dtm = program.getDataTypeManager();
int transactionID = program.startTransaction("test");
try {
Structure s1 =
(Structure) dtm.getDataType(new CategoryPath("/Category1/Category2"),
"Structure_1");
s1.insertBitFieldAt(3, 2, 6, td, 2, "bf1", "my bf1");
s1.insertBitFieldAt(3, 2, 4, td, 2, "bf2", "my bf2");
s1.clearFlexibleArrayComponent();
}
catch (InvalidDataTypeException e) {
e.printStackTrace();
Assert.fail();
}
finally {
program.endTransaction(transactionID, true);
}
}
});
executeMerge();
chooseOption(DataTypeMergeManager.OPTION_MY);// MY Structure_1
waitForCompletion();
checkConflictCount(0);
DataTypeManager dtm = resultProgram.getDataTypeManager();
Structure s1 =
(Structure) dtm.getDataType(new CategoryPath("/Category1/Category2"), "Structure_1");
assertNotNull(s1);
DataTypeComponent flexDtc = s1.getFlexibleArrayComponent();
assertNull(flexDtc);
DataTypeComponent[] dtcs = s1.getComponents();
assertEquals(7, dtcs.length);
assertEquals(4, dtcs[3].getOffset()); // based on the original 2-byte component length, the 1st byte remains undefined
assertEquals("bf1", dtcs[3].getFieldName());
assertEquals("my bf1", dtcs[3].getComment());
DataType dt = dtcs[3].getDataType();
assertTrue(dt instanceof BitFieldDataType);
BitFieldDataType bfDt = (BitFieldDataType) dt;
assertTrue(td.isEquivalent(bfDt.getBaseDataType()));
assertEquals(2, bfDt.getDeclaredBitSize());
assertEquals(6, bfDt.getBitOffset());
assertEquals(4, dtcs[4].getOffset()); // based on the original 2-byte component length, the 1st byte remains undefined
assertEquals("bf2", dtcs[4].getFieldName());
assertEquals("my bf2", dtcs[4].getComment());
dt = dtcs[4].getDataType();
assertTrue(dt instanceof BitFieldDataType);
bfDt = (BitFieldDataType) dt;
assertTrue(td.isEquivalent(bfDt.getBaseDataType()));
assertEquals(2, bfDt.getDeclaredBitSize());
assertEquals(4, bfDt.getBitOffset());
}
@Test
public void testEditUnions() throws Exception {

View File

@ -639,6 +639,8 @@ public class DataTypeMerge4Test extends AbstractDataTypeMergeTest {
chooseOption(DataTypeMergeManager.OPTION_MY);// Foo keeps its Bar, which creates Foo.conflict.
close(waitForWindow("Structure Update Failed")); // expected dependency error on Bar (2 occurrences of Bar use)
waitForCompletion();
// should be two .conflict data types
@ -1604,7 +1606,9 @@ public class DataTypeMerge4Test extends AbstractDataTypeMergeTest {
// Conflict on Bar
chooseOption(DataTypeMergeManager.OPTION_MY);// choose MY Bar
chooseOption(DataTypeMergeManager.OPTION_MY);// choose MY Foo
// NOTE: while Foo grows because of Bar, it was not explicitly changed in
// MY, so no conflict should be detected for Foo
waitForCompletion();
@ -2004,9 +2008,6 @@ public class DataTypeMerge4Test extends AbstractDataTypeMergeTest {
Structure foo = (Structure) dtm.getDataType(new CategoryPath("/MISC"), "Foo");
DataTypeComponent[] dtcs = foo.getDefinedComponents();
for (DataTypeComponent dtc : dtcs) {
System.out.println(dtc.getDataType().getDisplayName());
}
assertEquals(3, dtcs.length);
assertEquals("Structure Foo was the wrong size.", 18, foo.getLength());

View File

@ -1011,6 +1011,8 @@ public class DataTypeMerge6Test extends AbstractDataTypeMergeTest {
setupStructureInUnionAndViceVersa();
executeMerge();
close(waitForWindow("Union Update Failed")); // expected dependency error on CoolUnion
waitForCompletion();
DataTypeManager dtm = resultProgram.getDataTypeManager();

View File

@ -30,7 +30,7 @@ import ghidra.util.task.TaskMonitorAdapter;
public class DataTypeMergeUseForAllTest extends AbstractDataTypeMergeTest {
@Test
public void testDataTypeDeletedChangedDoNotUseForAll() throws Exception {
public void testDataTypeDeletedChangedDoNotUseForAll() throws Exception {
setupTestDataTypeDeletedChangedUseForAll();
@ -135,7 +135,7 @@ public class DataTypeMergeUseForAllTest extends AbstractDataTypeMergeTest {
}
@Test
public void testDataTypeDeletedChangedUseForAllPickLatest() throws Exception {
public void testDataTypeDeletedChangedUseForAllPickLatest() throws Exception {
setupTestDataTypeDeletedChangedUseForAll();
@ -170,7 +170,7 @@ public class DataTypeMergeUseForAllTest extends AbstractDataTypeMergeTest {
}
@Test
public void testDataTypeDeletedChangedUseForAllPickMy() throws Exception {
public void testDataTypeDeletedChangedUseForAllPickMy() throws Exception {
setupTestDataTypeDeletedChangedUseForAll();
@ -208,7 +208,7 @@ public class DataTypeMergeUseForAllTest extends AbstractDataTypeMergeTest {
}
@Test
public void testDataTypeDeletedChangedUseForAllPickOriginal() throws Exception {
public void testDataTypeDeletedChangedUseForAllPickOriginal() throws Exception {
setupTestDataTypeDeletedChangedUseForAll();

View File

@ -0,0 +1,498 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.bin.format.dwarf4.next;
import static org.junit.Assert.*;
import org.junit.*;
import ghidra.program.database.ProgramBuilder;
import ghidra.program.database.ProgramDB;
import ghidra.program.model.data.*;
import ghidra.program.model.data.DataTypeConflictHandler.ConflictResult;
import ghidra.test.AbstractGhidraHeadedIntegrationTest;
/**
 * Tests for the {@link DWARFDataTypeConflictHandler} conflict handler.
*/
public class DWARFConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
private ProgramDB program;
private DataTypeManager dataMgr;
private int transactionID;
private CategoryPath root = new CategoryPath(CategoryPath.ROOT, "conflict_test");
public DWARFConflictHandlerTest() {
super();
}
private void startTransaction() {
transactionID = program.startTransaction("Test");
}
private void endTransaction() {
program.endTransaction(transactionID, true);
}
@Before
public void setUp() throws Exception {
program = createDefaultProgram(testName.getMethodName(), ProgramBuilder._TOY, this);
dataMgr = program.getDataTypeManager();
startTransaction();
}
@After
public void tearDown() throws Exception {
endTransaction();
program.release(this);
}
private StructureDataType createPopulated(DataTypeManager dtm) {
StructureDataType struct = new StructureDataType(root, "struct1", 0, dtm);
struct.add(new CharDataType(dataMgr), 1, "char1", null);
struct.add(new CharDataType(dataMgr), 1, "char2", null);
return struct;
}
private StructureDataType createPopulated2(DataTypeManager dtm) {
StructureDataType struct = new StructureDataType(root, "struct1", 0, dtm);
struct.add(new CharDataType(dataMgr), 1, "blah1", null);
struct.add(new CharDataType(dataMgr), 1, "blah2", null);
struct.add(new CharDataType(dataMgr), 1, "blah3", null);
struct.add(new CharDataType(dataMgr), 1, "blah4", null);
return struct;
}
private StructureDataType createPopulated2Partial(DataTypeManager dtm) {
StructureDataType struct = createPopulated2(dtm);
struct.clearComponent(2);
struct.clearComponent(1);
return struct;
}
private StructureDataType createStub(DataTypeManager dtm, int size) {
return new StructureDataType(root, "struct1", size, dtm);
}
/**
* Assert a particular ConflictResult outcome when adding two structs to the DTM.
* <p>
 * A copy of the "addingStruct" is made before adding it because the DTM can modify the
 * impl instance during conflict resolution when it clones the datatype and renames the
 * clone.
* <p>
 * @param existingStruct structure added first (the pre-existing type)
 * @param addingStruct structure added second (the potentially conflicting type)
 * @param expectedResult expected conflict resolution outcome
*/
private void assertStruct(Composite existingStruct, Composite addingStruct,
ConflictResult expectedResult) {
DataType existingResult =
dataMgr.addDataType(existingStruct, DWARFDataTypeConflictHandler.INSTANCE);
DataType existingResult_copy = existingResult.copy(null);
DataType addingCopy = addingStruct.copy(null);
DataType addedResult =
dataMgr.addDataType(addingStruct, DWARFDataTypeConflictHandler.INSTANCE);
switch (expectedResult) {
case USE_EXISTING:
assertEquals("DataType name should match", existingResult.getName(),
addedResult.getName());
assertEquals("DataType CategoryPath should match", existingResult.getCategoryPath(),
addedResult.getCategoryPath());
assertEquals("DataType length should match", existingResult.getLength(),
addedResult.getLength());
assertTrue("Added DataType should be equiv to existing DataType",
addedResult.isEquivalent(existingResult));
break;
case REPLACE_EXISTING:
assertEquals("DataType name should match", addingCopy.getName(),
addedResult.getName());
assertEquals("DataType CategoryPath should match", addingCopy.getCategoryPath(),
addedResult.getCategoryPath());
assertEquals("DataType length should match", addingCopy.getLength(),
addedResult.getLength());
assertTrue("Added DataType should be equiv to its impl before it was added",
addedResult.isEquivalent(addingCopy));
assertFalse("Added DataType should not be equiv to existing DataType",
addedResult.isEquivalent(existingResult_copy));
// NOTE: direct member replacement works in most cases
// assertTrue("Overwritten DataType should have a deleted flag",
// existingResult.isDeleted());
break;
case RENAME_AND_ADD:
Assert.assertNotEquals("DataType name should have changed", addingCopy.getName(),
addedResult.getName());
assertEquals("DataType CategoryPath should not changed",
addingCopy.getCategoryPath(), addedResult.getCategoryPath());
assertEquals("DataType length should not change", addingCopy.getLength(),
addedResult.getLength());
break;
}
}
/**
* Tests the {@link DWARFDataTypeConflictHandler#INSTANCE}
 * conflict handler to ensure that adding an empty conflicting structure resolves to a
 * previously added populated structure.
*/
@Test
public void testAddEmptyStructResolveToPopulatedStruct1() {
assertStruct(createPopulated(dataMgr), createStub(dataMgr, 0), ConflictResult.USE_EXISTING);
}
@Test
public void testAddEmptyStructResolveToPopulatedStruct2() {
assertStruct(createPopulated(null), createStub(null, 0), ConflictResult.USE_EXISTING);
}
/**
* Tests the {@link DWARFDataTypeConflictHandler#INSTANCE}
* conflict handler to ensure that adding a populated structure replaces an existing
 * 'empty' structure. 'Empty' means either 0-byte or 1-byte length structs,
 * as previous versions of Ghidra did not allow truly empty structs.
*/
@Test
public void testAddPopulatedStructOverwriteStub1() {
assertStruct(createStub(dataMgr, 0), createPopulated(dataMgr),
ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddPopulatedStructOverwriteStub2() {
assertStruct(createStub(null, 0), createPopulated(null), ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddPopulatedStructOverwriteSameSizedStub() {
StructureDataType populated = createPopulated(dataMgr);
assertStruct(createStub(dataMgr, populated.getLength()), populated,
ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddStubStructUseSameSizedPopulated() {
StructureDataType populated = createPopulated(dataMgr);
assertStruct(populated, createStub(dataMgr, populated.getLength()),
ConflictResult.USE_EXISTING);
}
@Test
public void testAddStubStructCreateConflict() {
StructureDataType populated = createPopulated(dataMgr);
assertStruct(populated, createStub(dataMgr, populated.getLength() + 1),
ConflictResult.RENAME_AND_ADD);
}
@Test
public void testAddPartialStructResolveToPopulatedStruct() {
assertStruct(createPopulated2(dataMgr), createPopulated2Partial(dataMgr),
ConflictResult.USE_EXISTING);
}
@Test
public void testAddPopulatedStructOverwritePartialStruct() {
assertStruct(createPopulated2Partial(dataMgr), createPopulated2(dataMgr),
ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddStubUnionResolveToPopulated() {
Union populated = new UnionDataType(root, "union1", dataMgr);
populated.add(new CharDataType(dataMgr), 1, "blah1", null);
populated.add(new IntegerDataType(dataMgr), 4, "blah2", null);
Union stub = new UnionDataType(root, "union1", dataMgr);
assertStruct(populated, stub, ConflictResult.USE_EXISTING);
}
@Test
public void testAddPopulatedUnionOverwriteStub() {
Union populated = new UnionDataType(root, "union1", dataMgr);
populated.add(new CharDataType(dataMgr), 1, "blah1", null);
populated.add(new IntegerDataType(dataMgr), 4, "blah2", null);
Union stub = new UnionDataType(root, "union1", dataMgr);
assertStruct(stub, populated, ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddPopulatedUnionOverwritePartial() {
Union populated = new UnionDataType(root, "union1", dataMgr);
populated.add(new CharDataType(dataMgr), 1, "blah1", null);
populated.add(new IntegerDataType(dataMgr), 4, "blah2", null);
populated.add(new IntegerDataType(dataMgr), 4, "blah3", null);
Union partial = new UnionDataType(root, "union1", dataMgr);
partial.add(new CharDataType(dataMgr), 1, "blah1", null);
assertStruct(partial, populated, ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddConflictUnion() {
Union populated = new UnionDataType(root, "union1", dataMgr);
populated.add(new CharDataType(dataMgr), 1, "blah1", null);
populated.add(new IntegerDataType(dataMgr), 4, "blah2", null);
populated.add(new IntegerDataType(dataMgr), 4, "blah3", null);
Union populated2 = new UnionDataType(root, "union1", dataMgr);
populated2.add(new CharDataType(dataMgr), 1, "blahA", null);
assertStruct(populated, populated2, ConflictResult.RENAME_AND_ADD);
}
@Test
public void testAddPartialUnionWithStubStructResolveToExisting() {
Structure s1a = createPopulated(dataMgr);
Union populated = new UnionDataType(root, "union1", dataMgr);
populated.add(new CharDataType(dataMgr), 1, "blah1", null);
populated.add(s1a, s1a.getLength(), "blah2", null);
populated.add(s1a, s1a.getLength(), null, null);
Structure s1b = createStub(dataMgr, 0);
Union partial = new UnionDataType(root, "union1", dataMgr);
partial.add(s1b, s1b.getLength(), "blah2", null);
assertStruct(populated, partial, ConflictResult.USE_EXISTING);
}
/**
* Tests the {@link DWARFDataTypeConflictHandler#INSTANCE}
* conflict handler to ensure that adding a conflicting typedef to a conflicting stub structure
* (when there is already a typedef to a populated structure) correctly uses the
* existing populated structure and existing typedef to the populated structure.
*/
@Test
public void testTypedefToStubUseExistingTypedefToPopulatedStructure() {
StructureDataType populatedStructure = createPopulated(dataMgr);
int origPopStructLen = populatedStructure.getLength();
TypeDef populatedTD = new TypedefDataType(root, "typedef1", populatedStructure, dataMgr);
dataMgr.addDataType(populatedTD, null);
StructureDataType stubStructure = createStub(dataMgr, 0);
TypeDef stubTD = new TypedefDataType(root, "typedef1", stubStructure, dataMgr);
DataType stubTDResult = dataMgr.addDataType(stubTD, DWARFDataTypeConflictHandler.INSTANCE);
assertTrue(stubTDResult instanceof TypeDef);
assertEquals(populatedTD.getPathName(), stubTDResult.getPathName());
DataType stubTDResultRefdDT = ((TypeDef) stubTDResult).getDataType();
assertEquals(stubTDResultRefdDT.getLength(), origPopStructLen);
}
/**
* Tests the {@link DWARFDataTypeConflictHandler#INSTANCE}
 * conflict handler to ensure that truly conflicting structures and typedefs
 * are treated as new data types and are renamed when added.
*/
@Test
public void testTypedefConflictToConflictStruct() {
StructureDataType struct1a = createPopulated(dataMgr);
TypeDef td1a = new TypedefDataType(root, "typedef1", struct1a, dataMgr);
DataType td1a_result = dataMgr.addDataType(td1a, null);
String td1a_result_path = td1a_result.getPathName();
DataType s1a_result = ((TypeDef) td1a_result).getDataType();
String s1a_result_path = s1a_result.getPathName();
StructureDataType struct1b = createPopulated2(dataMgr);
TypeDef td1b = new TypedefDataType(root, "typedef1", struct1b, dataMgr);
DataType td1b_result = dataMgr.addDataType(td1b, DWARFDataTypeConflictHandler.INSTANCE);
String td1b_result_path = td1b_result.getPathName();
DataType s1b_result = ((TypeDef) td1b_result).getDataType();
String s1b_result_path = s1b_result.getPathName();
Assert.assertNotEquals(td1a_result_path, td1b_result_path);
Assert.assertNotEquals(s1a_result_path, s1b_result_path);
assertFalse(td1a_result.isDeleted());
assertFalse(s1a_result.isDeleted());
}
/**
* Tests the {@link DWARFDataTypeConflictHandler#INSTANCE}
* conflict handler when adding a conflicting typedef impl that is referred to multiple
* times during a single addDataType() call.
* <p>
* Success is if the fields of struct2 are all the same datatype, probably named typedef1.conflict.
* <p>
 * A failure would be if the fields of struct2 are different types, i.e. field1 is typedef1.conflict,
 * field2 is typedef1.conflict1, field3 is typedef1.conflict2.
* <p>
 * This test is useful because the typedef impl that is referred to multiple times triggers
 * equiv checking and conflict resolution each time it is referred to; if a precondition
 * for those checks changes in some way and causes them to operate differently, this test
 * will fail.
*/
@Test
public void testTypedefConflictToConflictStructMultiRef() {
StructureDataType struct1a = createPopulated(dataMgr);
TypeDef td1a = new TypedefDataType(root, "typedef1", struct1a, dataMgr);
DataType td1a_result = dataMgr.addDataType(td1a, DWARFDataTypeConflictHandler.INSTANCE);
StructureDataType struct1b = createPopulated2(dataMgr);
TypeDef td1b = new TypedefDataType(root, "typedef1", struct1b, dataMgr);
// Struct2 is used to create multiple references to the same conflicting typedef impl.
StructureDataType struct2 = new StructureDataType(root, "struct2", 0, dataMgr);
struct2.add(td1b, "typedef1_instance1", "first");
struct2.add(td1b, "typedef1_instance2", "second");
struct2.add(td1b, "typedef1_instance3", "third");
Structure struct2_result =
(Structure) dataMgr.addDataType(struct2, DWARFDataTypeConflictHandler.INSTANCE);
TypeDef td1b_result = (TypeDef) struct2_result.getComponent(0).getDataType();
String td1b_conflict_name = td1b_result.getPathName();
Assert.assertNotEquals(td1b_conflict_name, td1a_result.getPathName());
for (DataTypeComponent dtc : struct2_result.getComponents()) {
DataType dtcDT = dtc.getDataType();
String dtcDTName = dtcDT.getPathName();
assertEquals(dtcDTName, td1b_conflict_name);
}
}
/**
* Tests the {@link DWARFDataTypeConflictHandler#INSTANCE}
 * conflict handler when adding a conflicting (but equivalent) typedef impl that is referred to multiple
* times during a single addDataType() call.
* <p>
* Success is if the fields of struct2 are all the original typedef1 type.
* <p>
* A failure would be if the fields of struct2 are different types.
*/
@Test
public void testTypedefToStubUseExistingTypedefToPopulatedStructureMultiRef() {
StructureDataType struct1a = createPopulated(dataMgr);
TypeDef td1a = new TypedefDataType(root, "typedef1", struct1a, dataMgr);
DataType td1a_result = dataMgr.addDataType(td1a, DWARFDataTypeConflictHandler.INSTANCE);
String origtd1Name = td1a_result.getPathName();
StructureDataType struct1b = createStub(dataMgr, 0);
TypeDef td1b = new TypedefDataType(root, "typedef1", struct1b, dataMgr);
PointerDataType ptd = new PointerDataType(td1b, program.getDefaultPointerSize(), dataMgr);
// Struct2 is used to create multiple references to the same conflicting typedef impl.
// Use a pointer to the typedef, otherwise struct2's size will be wrong when the
// conflicting struct1 impl size changes from 0 to 10.
StructureDataType struct2 = new StructureDataType(root, "struct2", 0, dataMgr);
struct2.add(ptd, "typedef1_instance1", "first");
struct2.add(ptd, "typedef1_instance2", "second");
struct2.add(ptd, "typedef1_instance3", "third");
Structure struct2_result =
(Structure) dataMgr.addDataType(struct2, DWARFDataTypeConflictHandler.INSTANCE);
for (DataTypeComponent dtc : struct2_result.getComponents()) {
Pointer pr = (Pointer) dtc.getDataType();
TypeDef tr = (TypeDef) pr.getDataType();
String dtcDTName = tr.getPathName();
assertEquals(origtd1Name, dtcDTName);
}
}
/**
* Tests the {@link DWARFDataTypeConflictHandler#INSTANCE}
 * conflict handler when adding a typedef to a populated structure when there is already a
 * typedef to a stub structure.
*/
@Test
public void testAddTypedefToPopulatedStructReplaceTypedefToStubStructure() {
StructureDataType struct1a = createStub(dataMgr, 0);
TypeDef td1a = new TypedefDataType(root, "typedef1", struct1a, dataMgr);
DataType td1a_result = dataMgr.addDataType(td1a, DWARFDataTypeConflictHandler.INSTANCE);
String td1a_pathname = td1a_result.getPathName();
String struct1a_pathname = ((TypeDef) td1a_result).getDataType().getPathName();
StructureDataType struct1b = createPopulated(dataMgr);
TypeDef td1b = new TypedefDataType(root, "typedef1", struct1b, dataMgr);
DataType td1b_result = dataMgr.addDataType(td1b, DWARFDataTypeConflictHandler.INSTANCE);
String td1b_pathname = td1b_result.getPathName();
String struct1b_pathname = ((TypeDef) td1b_result).getDataType().getPathName();
assertEquals("Typedef should have same name as previous typedef", td1a_pathname,
td1b_pathname);
assertEquals("Typedef target should have same name as previous typedef target",
struct1a_pathname, struct1b_pathname);
}
@Test
public void testResolveDataTypeStructConflict() throws Exception {
DataTypeManager dtm = new StandAloneDataTypeManager("Test");
int id = dtm.startTransaction("");
Category otherRoot = dataMgr.getRootCategory();
Category subc = otherRoot.createCategory("subc");
Structure struct = new StructureDataType(subc.getCategoryPath(), "struct1", 10);
DataType resolvedStruct = dtm.resolve(struct, DWARFDataTypeConflictHandler.INSTANCE);
assertTrue(struct.isEquivalent(resolvedStruct));
assertEquals("/subc/struct1", resolvedStruct.getPathName());
struct.replace(0, dtm.resolve(new PointerDataType(resolvedStruct, 4, dtm),
DWARFDataTypeConflictHandler.INSTANCE), 4);
// NOTE: placing a DB dataType in an Impl datatype results in an invalid
// Impl type if one of its children refers to a deleted datatype. The
// 'struct' instance is such a case.
DataType resolvedStructA = dtm.resolve(struct, DWARFDataTypeConflictHandler.INSTANCE);
// Update struct with the expected result (old empty struct was replaced)
struct.replace(0, new PointerDataType(resolvedStructA, 4, dtm), 4);
assertTrue(struct.isEquivalent(resolvedStructA));
assertEquals("/subc/struct1", resolvedStructA.getPathName());
dtm.endTransaction(id, true);
dtm.close();
}
@Test
public void testResolveDataTypeNonStructConflict() throws Exception {
DataTypeManager dtm = new StandAloneDataTypeManager("Test");
int id = dtm.startTransaction("");
Category otherRoot = dataMgr.getRootCategory();
Category subc = otherRoot.createCategory("subc");
EnumDataType e = new EnumDataType(subc.getCategoryPath(), "Enum", 2);
DataType resolvedEnum = dtm.resolve(e, DWARFDataTypeConflictHandler.INSTANCE);
assertTrue(e.isEquivalent(resolvedEnum));
assertEquals("/subc/Enum", resolvedEnum.getPathName());
e.add("xyz", 1);
resolvedEnum = dtm.resolve(e, DWARFDataTypeConflictHandler.INSTANCE);
assertTrue(e.isEquivalent(resolvedEnum));
assertEquals("/subc/Enum.conflict", resolvedEnum.getPathName());
dtm.endTransaction(id, true);
dtm.close();
}
}

View File

@ -433,8 +433,7 @@ public class DWARFDataTypeImporterTest extends DWARFTestBase {
* other. (gcc linking options can cause types from different namespaces to be
* forced into the root namespace)
* <p>
* Currently this causes a collision and a failure, resulting in just one of the structures
* being defined, which isn't great but there is no other workable solution.
 * Currently this produces two structures, one renamed with a .conflict suffix.
* <p>
* If this test starts failing it means this behavior in Ghidra's DTM has changed and
* the DWARF logic needs to be examined in light of those changes.
@ -460,7 +459,7 @@ public class DWARFDataTypeImporterTest extends DWARFTestBase {
DataType dt1b = dwarfDTM.getDataType(struct1bDIE.getOffset(), null);
assertEquals("mystruct", dt1a.getName());
assertNull(dt1b);
assertEquals("mystruct.conflict", dt1b.getName());
}
/**
@ -497,8 +496,10 @@ public class DWARFDataTypeImporterTest extends DWARFTestBase {
* datatype name as the impl struct. The embedded db struct needs to be empty and default
* sized (1 byte), and the outer impl struct needs to be bigger.
* <p>
* Currently the DTM resolve() will return the new outer struct, but its field that
* refs the conflicting 1-byte struct has been changed to undefined (but name is still there).
 * Currently the DTM resolve() will ignore the conflict handler's attempt to
 * replace since it would result in a cyclic dependency issue. It will instead
 * rename the new structure as a conflict, with its field referring to the original
 * structure.
* <p>
* This situation happens in DWARF when there is a base class and a derived class
* that have the same name. They are in different namespaces, but during compilation
@ -515,10 +516,11 @@ public class DWARFDataTypeImporterTest extends DWARFTestBase {
StructureDataType x2 = new StructureDataType(rootCP, "X", 4);
x2.replaceAtOffset(0, x, 1, "f1", null);
Structure x3 = (Structure) dataMgr.resolve(x2, DataTypeConflictHandler.REPLACE_HANDLER);
assertEquals("X.conflict", x3.getName());
DataTypeComponent dtc = x3.getComponent(0);
DataType dtcDT = dtc.getDataType();
assertEquals("f1", dtc.getFieldName());
assertEquals("undefined", dtcDT.getName()); // undefined field is current behavior
assertEquals("X", dtcDT.getName()); // undefined field is current behavior
}
@Test
@ -699,7 +701,7 @@ public class DWARFDataTypeImporterTest extends DWARFTestBase {
List<DataTypeComponent> getBitFieldComponents(Structure struct) {
List<DataTypeComponent> results = new ArrayList<>();
for (DataTypeComponent dtc : struct.getComponents()) {
for (DataTypeComponent dtc : struct.getDefinedComponents()) {
if (dtc.getDataType() instanceof BitFieldDataType) {
results.add(dtc);
}

View File

@ -21,7 +21,6 @@ import java.util.*;
import org.junit.*;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.database.ProgramBuilder;
import ghidra.program.database.ProgramDB;
import ghidra.program.model.data.*;

View File

@ -90,14 +90,6 @@ public class ConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
return struct;
}
private StructureDataType createPopulated2Partial(DataTypeManager dtm) {
StructureDataType struct = createPopulated2(dtm);
struct.clearComponent(2);
struct.clearComponent(1);
return struct;
}
private StructureDataType createStub(DataTypeManager dtm, int size) {
return new StructureDataType(root, "struct1", size, dtm);
}
@ -145,8 +137,9 @@ public class ConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
addedResult.isEquivalent(addingCopy));
assertFalse("Added DataType should not be equiv to existing DataType",
addedResult.isEquivalent(existingResult_copy));
assertTrue("Overwritten DataType should have a deleted flag",
existingResult.isDeleted());
// NOTE: direct member replacement works in most cases
// assertTrue("Overwritten DataType should have a deleted flag",
// existingResult.isDeleted());
break;
case RENAME_AND_ADD:
Assert.assertNotEquals("DataType name should have changed", addingCopy.getName(),
@ -171,19 +164,9 @@ public class ConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
@Test
public void testAddEmptyStructResolveToPopulatedStruct2() {
assertStruct(createPopulated(dataMgr), createStub(dataMgr, 1), ConflictResult.USE_EXISTING);
}
@Test
public void testAddEmptyStructResolveToPopulatedStruct3() {
assertStruct(createPopulated(null), createStub(null, 0), ConflictResult.USE_EXISTING);
}
@Test
public void testAddEmptyStructResolveToPopulatedStruct4() {
assertStruct(createPopulated(null), createStub(null, 1), ConflictResult.USE_EXISTING);
}
/**
* Tests the {@link DataTypeConflictHandler#REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER RESORAAH}
* conflict handler to ensure that adding a populated structure replaces an existing
@ -198,34 +181,9 @@ public class ConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
@Test
public void testAddPopulatedStructOverwriteStub2() {
assertStruct(createStub(dataMgr, 1), createPopulated(dataMgr),
ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddPopulatedStructOverwriteStub3() {
assertStruct(createStub(null, 0), createPopulated(null), ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddPopulatedStructOverwriteStub4() {
assertStruct(createStub(null, 1), createPopulated(null), ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddPopulatedStructOverwriteSameSizedStub() {
StructureDataType populated = createPopulated(dataMgr);
assertStruct(createStub(dataMgr, populated.getLength()), populated,
ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddStubStructUseSameSizedPopulated() {
StructureDataType populated = createPopulated(dataMgr);
assertStruct(populated, createStub(dataMgr, populated.getLength()),
ConflictResult.USE_EXISTING);
}
@Test
public void testAddStubStructCreateConflict() {
StructureDataType populated = createPopulated(dataMgr);
@ -233,18 +191,6 @@ public class ConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
ConflictResult.RENAME_AND_ADD);
}
@Test
public void testAddPartialStructResolveToPopulatedStruct() {
assertStruct(createPopulated2(dataMgr), createPopulated2Partial(dataMgr),
ConflictResult.USE_EXISTING);
}
@Test
public void testAddPopulatedStructOverwritePartialStruct() {
assertStruct(createPopulated2Partial(dataMgr), createPopulated2(dataMgr),
ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddStubUnionResolveToPopulated() {
Union populated = new UnionDataType(root, "union1", dataMgr);
@ -267,19 +213,6 @@ public class ConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
assertStruct(stub, populated, ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddPopulatedUnionOverwritePartial() {
Union populated = new UnionDataType(root, "union1", dataMgr);
populated.add(new CharDataType(dataMgr), 1, "blah1", null);
populated.add(new IntegerDataType(dataMgr), 4, "blah2", null);
populated.add(new IntegerDataType(dataMgr), 4, "blah3", null);
Union partial = new UnionDataType(root, "union1", dataMgr);
partial.add(new CharDataType(dataMgr), 1, "blah1", null);
assertStruct(partial, populated, ConflictResult.REPLACE_EXISTING);
}
@Test
public void testAddConflictUnion() {
Union populated = new UnionDataType(root, "union1", dataMgr);
@ -293,21 +226,6 @@ public class ConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
assertStruct(populated, populated2, ConflictResult.RENAME_AND_ADD);
}
@Test
public void testAddPartialUnionWithStubStructResolveToExisting() {
Structure s1a = createPopulated(dataMgr);
Union populated = new UnionDataType(root, "union1", dataMgr);
populated.add(new CharDataType(dataMgr), 1, "blah1", null);
populated.add(s1a, s1a.getLength(), "blah2", null);
populated.add(s1a, s1a.getLength(), null, null);
Structure s1b = createStub(dataMgr, 0);
Union partial = new UnionDataType(root, "union1", dataMgr);
partial.add(s1b, s1b.getLength(), "blah2", null);
assertStruct(populated, partial, ConflictResult.USE_EXISTING);
}
/**
* Tests the {@link DataTypeConflictHandler#REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER RESORAAH}
* conflict handler to ensure that adding a conflicting typedef to a conflicting stub structure
@ -477,40 +395,6 @@ public class ConflictHandlerTest extends AbstractGhidraHeadedIntegrationTest {
struct1a_pathname, struct1b_pathname);
}
@Test
public void testResolveDataTypeStructConflict() throws Exception {
DataTypeManager dtm = new StandAloneDataTypeManager("Test");
int id = dtm.startTransaction("");
Category otherRoot = dataMgr.getRootCategory();
Category subc = otherRoot.createCategory("subc");
Structure struct = new StructureDataType(subc.getCategoryPath(), "struct1", 10);
DataType resolvedStruct = dtm.resolve(struct,
DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER);
assertTrue(struct.isEquivalent(resolvedStruct));
assertEquals("/subc/struct1", resolvedStruct.getPathName());
struct.replace(0, dtm.resolve(new PointerDataType(resolvedStruct, 4, dtm),
DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER), 4);
// NOTE: placing a DB dataType in an Impl datatype results in an invalid
// Impl type if one of its children refer to a deleted datatype. The
// 'struct' instance is such a case.
DataType resolvedStructA = dtm.resolve(struct,
DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER);
// Update struct with the expected result (old empty struct was replaced)
struct.replace(0, new PointerDataType(resolvedStructA, 4, dtm), 4);
assertTrue(struct.isEquivalent(resolvedStructA));
assertEquals("/subc/struct1", resolvedStructA.getPathName());
dtm.endTransaction(id, true);
dtm.close();
}
@Test
public void testResolveDataTypeNonStructConflict() throws Exception {
DataTypeManager dtm = new StandAloneDataTypeManager("Test");

View File

@ -15,10 +15,11 @@
*/
package ghidra.base.project;
import static generic.test.AbstractGenericTest.*;
import static generic.test.TestUtils.argTypes;
import static generic.test.TestUtils.args;
import static org.junit.Assert.assertTrue;
import static generic.test.AbstractGTest.*;
import static generic.test.AbstractGenericTest.getInstanceField;
import static generic.test.AbstractGenericTest.invokeInstanceMethod;
import static generic.test.TestUtils.*;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.List;
@ -68,7 +69,7 @@ public class FakeSharedProject {
LocalFileSystem fs = repo.getSharedFileSystem();
if (fs != null) {
// first project will keeps its' versioned file system
// first project will keep its versioned file system
setVersionedFileSystem(fs);
}
}

View File

@ -27,24 +27,24 @@ public class RenameStructureFieldTask extends RenameTask {
private Structure structure;
public int offset;
public RenameStructureFieldTask(PluginTool tool, Program program, DecompilerPanel panel,
ClangToken token, Structure structure, int offset) {
super(tool, program, panel, token, token.getText());
this.structure = structure;
this.offset = offset;
}
@Override
public void commit() throws DuplicateNameException, InvalidInputException {
if (structure.isNotYetDefined()) {
DataType newtype = new Undefined1DataType();
structure.insert(0,newtype);
structure.insert(0, newtype);
}
DataTypeComponent comp = structure.getComponentAt(offset);
if (comp.getDataType() == DataType.DEFAULT) { // Is this just a placeholder
DataType newtype = new Undefined1DataType();
structure.replaceAtOffset(offset, newtype,1, newName, "Created by retype action");
structure.replaceAtOffset(offset, newtype, 1, newName, "Created by retype action");
}
else {
comp.setFieldName(newName);
@ -59,9 +59,8 @@ public class RenameStructureFieldTask extends RenameTask {
@Override
public boolean isValid(String newNm) {
newName = newNm;
DataTypeComponent[] comp = structure.getComponents();
DataTypeComponent[] comp = structure.getDefinedComponents();
for (DataTypeComponent element : comp) {
// if (comp[i].getDataType() == DataType.DEFAULT) continue; // Placeholder, don't compare name
String fieldname = element.getFieldName();
if (fieldname == null) {
continue;

View File

@ -261,7 +261,7 @@ public class DataTypeDependencyOrderer {
}
else if (dataType instanceof Structure) {
Structure struct = (Structure) dataType;
DataTypeComponent dtcomps[] = struct.getComponents();
DataTypeComponent dtcomps[] = struct.getDefinedComponents();
for (DataTypeComponent dtcomp : dtcomps) {
addDependent(entry, dtcomp.getDataType());
}

View File

@ -33,7 +33,7 @@ import ghidra.util.timer.GTimerMonitor;
/**
* <code>BlockStreamServer</code> provides a block stream server implementation intended for
* integration with the RMI GhidraServer implementation. The default instance will obtain its'
* integration with the RMI GhidraServer implementation. The default instance will obtain its
* port from the {@link ServerPortFactory} while all instances will bind to the default
* {@link InetAddress#getLocalHost()} or the host address specified via the RMI property
* <code>java.rmi.server.hostname</code> which is set via the GhidraServer -ip command

View File

@ -141,7 +141,7 @@ class DefaultCompositeMember extends CompositeMember {
* @param baseDataType bitfield base datatype
* @param bitSize bitfield size in bits
* @param bitOffsetWithinBaseType offset of bitfield within base type
* @throws InvalidDataTypeException
* @throws InvalidDataTypeException invalid baseDataType for bitfield
*/
private DefaultCompositeMember(int componentOffset, DataType baseDataType, int bitSize,
int bitOffsetWithinBaseType) throws InvalidDataTypeException {
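// Illustrative cross-reference (not code from this change): the merge tests in this commit
// exercise these parameters, e.g. insertBitFieldAt(3, 2, 6, td, 2, "bf1", "my bf1") later
// asserts getDeclaredBitSize() == 2 and getBitOffset() == 6, i.e. a 2-bit field at bit
// offset 6 within its base type.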
@ -340,10 +340,10 @@ class DefaultCompositeMember extends CompositeMember {
}
}
private boolean isGoodAlignment(Composite testCompsosite, int preferredSize) {
private boolean isGoodAlignment(Composite testComposite, int preferredSize) {
boolean alignOK = true;
if (preferredSize > 0 && testCompsosite.getNumComponents() != 0) {
alignOK = (testCompsosite.getLength() == preferredSize);
if (preferredSize > 0 && testComposite.getNumComponents() != 0) {
alignOK = (testComposite.getLength() == preferredSize);
}
if (alignOK && isStructureContainer()) {
@ -351,7 +351,7 @@ class DefaultCompositeMember extends CompositeMember {
Structure struct = (Structure) memberDataType;
DataTypeComponent[] unalignedComponents = struct.getDefinedComponents();
int index = 0;
for (DataTypeComponent dtc : testCompsosite.getComponents()) {
for (DataTypeComponent dtc : testComposite.getComponents()) {
DataTypeComponent unalignedDtc = unalignedComponents[index++];
if (!isComponentUnchanged(dtc, unalignedDtc)) {
alignOK = false;

View File

@ -79,12 +79,9 @@ class PdbDataTypeParser {
return programDataTypeMgr;
}
void flushDataTypeCache(TaskMonitor monitor) throws CancelledException {
for (DataType dt : dataTypeCache.values()) {
monitor.checkCanceled();
programDataTypeMgr.resolve(dt,
DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER);
}
void flushDataTypeCache() throws CancelledException {
programDataTypeMgr.addDataTypes(dataTypeCache.values(),
DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER, monitor);
}
/**

View File

@ -294,7 +294,7 @@ public class PdbParser {
// Ensure that all data types are resolved
if (dataTypeParser != null) {
dataTypeParser.flushDataTypeCache(monitor);
dataTypeParser.flushDataTypeCache();
}
}

View File

@ -20,7 +20,7 @@ import ghidra.program.model.data.DataType;
/**
 * <code>WrappedDataType</code> provides the ability to wrap
* a {@link DataType} with additional information not conveyed
* by the datatype on its' own.
* by the datatype on its own.
* <P>
* Note that a BitFieldDataType instance may be specified as the datatype
* in order to convey bitfield related information.

View File

@ -42,7 +42,7 @@ public class FieldIndexTable extends IndexTable {
/**
* Construct a new or existing secondary index. An existing index must have
* its' root ID specified within the tableRecord.
* its root ID specified within the tableRecord.
* @param primaryTable primary table.
* @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if an IO error occurs
@ -244,8 +244,9 @@ public class FieldIndexTable extends IndexTable {
@Override
public boolean hasNext() throws IOException {
if (hasNext)
if (hasNext) {
return true;
}
hasPrev = false; // TODO ???
indexKey = (IndexField) indexIterator.next();
int skipCnt = 0;
@ -259,8 +260,9 @@ public class FieldIndexTable extends IndexTable {
indexKey = (IndexField) indexIterator.next();
}
if (indexKey == null)
if (indexKey == null) {
return false;
}
hasNext = true;
return true;
@ -268,8 +270,9 @@ public class FieldIndexTable extends IndexTable {
@Override
public boolean hasPrevious() throws IOException {
if (hasPrev)
if (hasPrev) {
return true;
}
hasNext = false; // TODO ???
indexKey = (IndexField) indexIterator.previous();
int skipCnt = 0;
@ -283,8 +286,9 @@ public class FieldIndexTable extends IndexTable {
indexKey = (IndexField) indexIterator.previous();
}
if (indexKey == null)
if (indexKey == null) {
return false;
}
hasPrev = true;
return true;
@ -321,12 +325,13 @@ public class FieldIndexTable extends IndexTable {
*/
@Override
public boolean delete() throws IOException {
if (lastKey == null)
if (lastKey == null) {
return false;
}
synchronized (db) {
long[] keys = findPrimaryKeys(lastKey.getIndexField());
for (int i = 0; i < keys.length; i++) {
primaryTable.deleteRecord(keys[i]);
for (long key : keys) {
primaryTable.deleteRecord(key);
}
lastKey = null;
return true;

View File

@ -48,7 +48,7 @@ class FixedIndexTable extends IndexTable {
/**
* Construct a new or existing secondary index. An existing index must have
* its' root ID specified within the tableRecord.
* its root ID specified within the tableRecord.
* @param primaryTable primary table.
* @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if an IO error occurs
@ -66,11 +66,13 @@ class FixedIndexTable extends IndexTable {
*/
@Override
long[] findPrimaryKeys(Field indexValue) throws IOException {
if (!indexValue.getClass().equals(fieldType.getClass()))
if (!indexValue.getClass().equals(fieldType.getClass())) {
throw new IllegalArgumentException("Incorrect indexed field type");
}
Record indexRecord = indexTable.getRecord(indexValue.getLongValue());
if (indexRecord == null)
if (indexRecord == null) {
return emptyKeyArray;
}
IndexBuffer indexBuffer = new IndexBuffer(indexValue, indexRecord.getBinaryData(0));
return indexBuffer.getPrimaryKeys();
}
@ -83,11 +85,13 @@ class FixedIndexTable extends IndexTable {
*/
@Override
int getKeyCount(Field indexValue) throws IOException {
if (!indexValue.getClass().equals(fieldType.getClass()))
if (!indexValue.getClass().equals(fieldType.getClass())) {
throw new IllegalArgumentException("Incorrect indexed field type");
}
Record indexRecord = indexTable.getRecord(indexValue.getLongValue());
if (indexRecord == null)
if (indexRecord == null) {
return 0;
}
IndexBuffer indexBuffer = new IndexBuffer(indexValue, indexRecord.getBinaryData(0));
return indexBuffer.keyCount;
}
@ -236,8 +240,9 @@ class FixedIndexTable extends IndexTable {
@Override
public boolean hasNext() throws IOException {
if (hasNext)
if (hasNext) {
return true;
}
try {
long key = indexIterator.next();
keyField = fieldType.newField();
@ -253,8 +258,9 @@ class FixedIndexTable extends IndexTable {
@Override
public boolean hasPrevious() throws IOException {
if (hasPrev)
if (hasPrev) {
return true;
}
try {
long key = indexIterator.previous();
keyField = fieldType.newField();
@ -297,14 +303,15 @@ class FixedIndexTable extends IndexTable {
*/
@Override
public boolean delete() throws IOException {
if (lastKey == null)
if (lastKey == null) {
return false;
}
synchronized (db) {
IndexBuffer indexBuf = getIndexBuffer(lastKey);
if (indexBuf != null) {
long[] keys = indexBuf.getPrimaryKeys();
for (int i = 0; i < keys.length; i++) {
primaryTable.deleteRecord(keys[i]);
for (long key : keys) {
primaryTable.deleteRecord(key);
}
// The following does not actually delete the index record since it
// should already have been removed with the removal of all associated

View File

@ -63,14 +63,15 @@ abstract class IndexTable {
/**
* Construct a new or existing secondary index. An existing index must have
* its' root ID specified within the tableRecord.
* its root ID specified within the tableRecord.
* @param primaryTable primary table.
* @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if IO error occurs
*/
IndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
if (!primaryTable.useLongKeys())
if (!primaryTable.useLongKeys()) {
throw new AssertException("Only long-key tables may be indexed");
}
this.db = primaryTable.getDBHandle();
this.primaryTable = primaryTable;
this.indexTableRecord = indexTableRecord;
@ -90,8 +91,9 @@ abstract class IndexTable {
static IndexTable getIndexTable(DBHandle db, TableRecord indexTableRecord) throws IOException {
String name = indexTableRecord.getName();
Table primaryTable = db.getTable(name);
if (primaryTable == null)
if (primaryTable == null) {
throw new AssertException("Table not found: " + name);
}
if (indexTableRecord.getSchema().getKeyFieldType() instanceof IndexField) {
return new FieldIndexTable(primaryTable, indexTableRecord);
@ -111,8 +113,9 @@ abstract class IndexTable {
* @throws IOException thrown if IO error occurs
*/
static IndexTable createIndexTable(Table primaryTable, int indexColumn) throws IOException {
if (primaryTable.getRecordCount() != 0)
if (primaryTable.getRecordCount() != 0) {
throw new AssertException();
}
return new FieldIndexTable(primaryTable, indexColumn);
}
@ -613,8 +616,9 @@ abstract class IndexTable {
*/
@Override
public boolean delete() throws IOException {
if (lastKey == null)
if (lastKey == null) {
return false;
}
synchronized (db) {
long key = lastKey.getLongValue();
primaryTable.deleteRecord(key);

View File

@ -50,7 +50,7 @@ class VarIndexTable extends IndexTable {
/**
* Construct a new or existing secondary index. An existing index must have
* its' root ID specified within the tableRecord.
* its root ID specified within the tableRecord.
* @param primaryTable primary table.
* @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if an IO error occurs
@ -69,11 +69,13 @@ class VarIndexTable extends IndexTable {
*/
@Override
long[] findPrimaryKeys(Field indexValue) throws IOException {
if (!indexValue.getClass().equals(fieldType.getClass()))
if (!indexValue.getClass().equals(fieldType.getClass())) {
throw new IllegalArgumentException("Incorrect indexed field type");
}
Record indexRecord = indexTable.getRecord(indexValue);
if (indexRecord == null)
if (indexRecord == null) {
return emptyKeyArray;
}
IndexBuffer indexBuffer = new IndexBuffer(indexValue, indexRecord.getBinaryData(0));
return indexBuffer.getPrimaryKeys();
}
@ -87,11 +89,13 @@ class VarIndexTable extends IndexTable {
*/
@Override
int getKeyCount(Field indexValue) throws IOException {
if (!indexValue.getClass().equals(fieldType.getClass()))
if (!indexValue.getClass().equals(fieldType.getClass())) {
throw new IllegalArgumentException("Incorrect indexed field type");
}
Record indexRecord = indexTable.getRecord(indexValue);
if (indexRecord == null)
if (indexRecord == null) {
return 0;
}
IndexBuffer indexBuffer = new IndexBuffer(indexValue, indexRecord.getBinaryData(0));
return indexBuffer.keyCount;
}
@ -237,11 +241,13 @@ class VarIndexTable extends IndexTable {
@Override
public boolean hasNext() throws IOException {
if (hasNext)
if (hasNext) {
return true;
}
Field key = indexIterator.next();
if (key == null)
if (key == null) {
return false;
}
keyField = key;
hasNext = true;
hasPrev = false;
@ -250,11 +256,13 @@ class VarIndexTable extends IndexTable {
@Override
public boolean hasPrevious() throws IOException {
if (hasPrev)
if (hasPrev) {
return true;
}
Field key = indexIterator.previous();
if (key == null)
if (key == null) {
return false;
}
keyField = key;
hasNext = false;
hasPrev = true;
@ -290,14 +298,15 @@ class VarIndexTable extends IndexTable {
*/
@Override
public boolean delete() throws IOException {
if (lastKey == null)
if (lastKey == null) {
return false;
}
synchronized (db) {
IndexBuffer indexBuf = getIndexBuffer(lastKey);
if (indexBuf != null) {
long[] keys = indexBuf.getPrimaryKeys();
for (int i = 0; i < keys.length; i++) {
primaryTable.deleteRecord(keys[i]);
for (long key : keys) {
primaryTable.deleteRecord(key);
}
// The following does not actually delete the index record since it
// should already have been removed with the removal of all associated

View File

@ -207,8 +207,9 @@ public class LocalBufferFile implements BufferFile {
* @throws IOException if an I/O error occurs during file creation
*/
public LocalBufferFile(File file, int bufferSize) throws IOException {
if (file.exists())
if (file.exists()) {
throw new DuplicateFileException("File " + file + " already exists");
}
this.file = file;
this.bufferSize = bufferSize;
this.blockSize = bufferSize + BUFFER_PREFIX_SIZE;
@ -293,8 +294,9 @@ public class LocalBufferFile implements BufferFile {
@Override
public int getParameter(String name) throws NoSuchElementException {
Object obj = userParms.get(name);
if (obj == null)
if (obj == null) {
throw new NoSuchElementException(name);
}
return ((Integer) obj).intValue();
}
@ -409,7 +411,7 @@ public class LocalBufferFile implements BufferFile {
/**
* Set random access file (raf) position to the file block containing the specified buffer
* identified by its' bufferIndex. It is important to understand the distinction between
* identified by its bufferIndex. It is important to understand the distinction between
* blocks and buffers, where buffers are stored within file blocks which are slightly larger.
* In addition, the first file block stores the file header and is not used to store a buffer.
* @param bufferIndex buffer index
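The layout described above implies a simple seek computation: block 0 holds the file header, so buffer N begins at byte (N + 1) * blockSize. A minimal sketch of that arithmetic (names are illustrative, not the class's actual fields):

    // Sketch only: maps a buffer index to its absolute file position,
    // assuming block 0 is reserved for the header as described above.
    static long bufferBlockPosition(int bufferIndex, int blockSize) {
        return (long) (bufferIndex + 1) * blockSize;
    }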
@ -449,24 +451,27 @@ public class LocalBufferFile implements BufferFile {
// Check magic number
long magicNumber = raf.readLong();
if (magicNumber != MAGIC_NUMBER)
if (magicNumber != MAGIC_NUMBER) {
throw new IOException("Unrecognized file format");
}
// Read file ID
fileId = raf.readLong();
// Check file format version
int headerFormatVersion = raf.readInt();
if (headerFormatVersion != HEADER_FORMAT_VERSION)
if (headerFormatVersion != HEADER_FORMAT_VERSION) {
throw new IOException("Unrecognized file format");
}
// Read buffer size, free buffer count, and first free buffer index
blockSize = raf.readInt();
bufferSize = blockSize - BUFFER_PREFIX_SIZE;
int firstFreeBufferIndex = raf.readInt();
long len = raf.length();
if ((len % blockSize) != 0)
if ((len % blockSize) != 0) {
throw new IOException("Corrupt file");
}
bufferCount = (int) (len / blockSize) - 1;
// Read user-defined integer parameters values
@ -488,13 +493,13 @@ public class LocalBufferFile implements BufferFile {
*/
private void writeHeader() throws IOException {
if (readOnly)
if (readOnly) {
throw new IOException("File is read-only");
}
// Output free list
int prev = -1;
for (int i = 0; i < freeIndexes.length; i++) {
int index = freeIndexes[i];
for (int index : freeIndexes) {
putFreeBlock(index, prev);
prev = index;
}
@ -512,15 +517,15 @@ public class LocalBufferFile implements BufferFile {
String[] parmNames = getParameterNames();
raf.writeInt(parmNames.length);
int cnt = VER1_FIXED_HEADER_LENGTH;
for (int i = 0; i < parmNames.length; i++) {
byte[] nameBytes = parmNames[i].getBytes(STRING_ENCODING);
for (String parmName : parmNames) {
byte[] nameBytes = parmName.getBytes(STRING_ENCODING);
cnt += 8 + nameBytes.length;
if (cnt > bufferSize) {
throw new IOException("Buffer size too small");
}
raf.writeInt(nameBytes.length);
raf.write(nameBytes);
raf.writeInt(getParameter(parmNames[i]));
raf.writeInt(getParameter(parmName));
}
}
@ -624,8 +629,9 @@ public class LocalBufferFile implements BufferFile {
byte[] data = buf.data;
boolean empty = buf.isEmpty();
if (!empty && data.length != bufferSize)
if (!empty && data.length != bufferSize) {
throw new IllegalArgumentException("Bad buffer size");
}
int blockIndex = buf.getId() + 1;
@ -644,10 +650,12 @@ public class LocalBufferFile implements BufferFile {
@Override
public synchronized DataBuffer get(DataBuffer buf, int index) throws IOException {
if (index > bufferCount)
if (index > bufferCount) {
throw new EOFException("Buffer index too large (" + index + " > " + bufferCount + ")");
if (raf == null)
}
if (raf == null) {
throw new ClosedException();
}
seekBufferBlock(index);
@ -684,18 +692,22 @@ public class LocalBufferFile implements BufferFile {
@Override
public synchronized void put(DataBuffer buf, int index) throws IOException {
if (readOnly)
if (readOnly) {
throw new IOException("File is read-only");
if (raf == null)
}
if (raf == null) {
throw new ClosedException();
}
if (index > MAX_BUFFER_INDEX)
if (index > MAX_BUFFER_INDEX) {
throw new EOFException("Buffer index too large, exceeds max-int");
}
byte[] data = buf.data;
boolean empty = buf.isEmpty();
if (!empty && data.length != bufferSize)
if (!empty && data.length != bufferSize) {
throw new IllegalArgumentException("Bad buffer size");
}
seekBufferBlock(index);
@ -741,8 +753,9 @@ public class LocalBufferFile implements BufferFile {
*/
void truncate(int indexCount) throws IOException {
if (readOnly)
if (readOnly) {
throw new IOException("File is read-only");
}
long size = (indexCount + 1) * blockSize;
raf.setLength(size);
@ -756,8 +769,9 @@ public class LocalBufferFile implements BufferFile {
*/
boolean flush() throws IOException {
if (raf == null || readOnly || temporary)
if (raf == null || readOnly || temporary) {
return false;
}
// write header
writeHeader();
@ -824,8 +838,9 @@ public class LocalBufferFile implements BufferFile {
@Override
public synchronized boolean setReadOnly() throws IOException {
if (!flush())
if (!flush()) {
return false;
}
raf.close();
raf = new RandomAccessFile(file, "r");
@ -893,8 +908,9 @@ public class LocalBufferFile implements BufferFile {
@Override
public synchronized boolean delete() {
if (raf == null || readOnly)
if (raf == null || readOnly) {
return false;
}
boolean success = false;
try {
@ -1376,11 +1392,13 @@ public class LocalBufferFile implements BufferFile {
*/
public static void copyFile(BufferFile srcFile, BufferFile destFile, ChangeMap changeMap,
TaskMonitor monitor) throws IOException, CancelledException {
if (destFile.isReadOnly())
if (destFile.isReadOnly()) {
throw new IOException("File is read-only");
}
if (srcFile.getBufferSize() != destFile.getBufferSize())
if (srcFile.getBufferSize() != destFile.getBufferSize()) {
throw new IOException("Buffer sizes differ");
}
if (monitor == null) {
monitor = TaskMonitorAdapter.DUMMY_MONITOR;
@ -1405,8 +1423,7 @@ public class LocalBufferFile implements BufferFile {
if (headerTransferRequired) {
destFile.clearParameters();
String[] parmNames = srcFile.getParameterNames();
for (int i = 0; i < parmNames.length; i++) {
String name = parmNames[i];
for (String name : parmNames) {
destFile.setParameter(name, srcFile.getParameter(name));
}
monitor.setProgress(srcBlockCnt + 1);
@ -1512,12 +1529,13 @@ public class LocalBufferFile implements BufferFile {
*/
public static void cleanupOldPreSaveFiles(File dir, long beforeNow) {
File[] oldFiles = dir.listFiles(new BufferFileFilter(null, PRESAVE_FILE_EXT));
if (oldFiles == null)
if (oldFiles == null) {
return;
for (int i = 0; i < oldFiles.length; i++) {
if ((beforeNow == 0 || oldFiles[i].lastModified() < beforeNow) &&
oldFiles[i].delete()) {
Msg.info(LocalBufferFile.class, "Removed old presave file: " + oldFiles[i]);
}
for (File oldFile : oldFiles) {
if ((beforeNow == 0 || oldFile.lastModified() < beforeNow) &&
oldFile.delete()) {
Msg.info(LocalBufferFile.class, "Removed old presave file: " + oldFile);
}
}
}
@ -1529,8 +1547,9 @@ public class LocalBufferFile implements BufferFile {
*/
static int getRecommendedBufferSize(int requestedBufferSize) {
int size = (requestedBufferSize + BUFFER_PREFIX_SIZE) & -MINIMUM_BLOCK_SIZE;
if (size <= 0)
if (size <= 0) {
size = MINIMUM_BLOCK_SIZE;
}
return size - BUFFER_PREFIX_SIZE;
}
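Assuming MINIMUM_BLOCK_SIZE is a power of two, the mask -MINIMUM_BLOCK_SIZE clears the low bits, so the computed block size is rounded down to a block-size multiple and the <= 0 guard handles very small requests. A worked sketch with hypothetical constants (the real values live in this class):

    // Hypothetical constants for illustration only.
    static final int PREFIX = 1;       // stand-in for BUFFER_PREFIX_SIZE
    static final int MIN_BLOCK = 1024; // stand-in for MINIMUM_BLOCK_SIZE (power of two)

    static int recommendedBufferSize(int requested) {
        int size = (requested + PREFIX) & -MIN_BLOCK; // round down to a MIN_BLOCK multiple
        if (size <= 0) {
            size = MIN_BLOCK;
        }
        return size - PREFIX; // e.g. requested 4000 -> block 3072 -> buffer 3071
    }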

View File

@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -18,28 +17,33 @@ package ghidra.util;
public class UniversalID {
private long id;
public UniversalID(long id) {
this.id = id;
}
public long getValue() {
return id;
}
@Override
public boolean equals( Object obj ) {
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj == null || !(obj instanceof UniversalID)) {
return false;
}
return ((UniversalID)obj).id == id;
return ((UniversalID) obj).id == id;
}
@Override
public int hashCode() {
return (int)id;
return (int) (id ^ (id >>> 32));
}
@Override
public String toString() {
public String toString() {
return Long.toString(id);
}
}
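The revised hashCode() folds the high 32 bits into the low 32 (the same mix performed by Long.hashCode), so IDs that differ only in their upper half no longer collide. A quick illustration:

    static void hashFoldDemo() {
        long a = 0x0000000012345678L;
        long b = 0x0000000112345678L;
        // Old scheme truncated to (int) id: both values hash to 0x12345678.
        assert (int) a == (int) b;
        // New scheme mixes in the high word, so these two IDs hash differently.
        assert (int) (a ^ (a >>> 32)) != (int) (b ^ (b >>> 32));
    }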

View File

@ -15,8 +15,7 @@
*/
package ghidra.app.plugin.core.datamgr.archive;
import ghidra.program.model.data.ArchiveType;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.*;
import ghidra.util.UniversalID;
public class BuiltInSourceArchive implements SourceArchive {

View File

@ -20,7 +20,6 @@ import java.util.*;
import db.*;
import db.util.ErrorHandler;
import ghidra.framework.store.LockException;
import ghidra.program.database.*;
import ghidra.program.database.data.DataTypeManagerDB;
import ghidra.program.database.map.*;
@ -2140,7 +2139,7 @@ public class CodeManager implements ErrorHandler, ManagerDB {
}
if (dt instanceof Structure) {
Structure structDt = (Structure) dt;
for (DataTypeComponent component : structDt.getComponents()) {
for (DataTypeComponent component : structDt.getDefinedComponents()) {
if (containsAddressComponents(component.getDataType())) {
return true;
}

View File

@ -184,9 +184,6 @@ class ArrayDB extends DataTypeDB implements Array {
@Override
public boolean isEquivalent(DataType dt) {
if (dt == null) {
return false;
}
if (dt == this) {
return true;
}

View File

@ -252,12 +252,14 @@ class CategoryDB extends DatabaseObject implements Category {
mgr.lock.acquire();
try {
checkDeleted();
dt = dt.clone(dt.getDataTypeManager());
try {
dt.setCategoryPath(getCategoryPath());
}
catch (DuplicateNameException e) {
// can't happen here because we made a copy
if (!getCategoryPath().equals(dt.getCategoryPath())) {
dt = dt.clone(dt.getDataTypeManager());
try {
dt.setCategoryPath(getCategoryPath());
}
catch (DuplicateNameException e) {
// can't happen here because we made a copy
}
}
DataType resolvedDataType = mgr.resolve(dt, handler);
return resolvedDataType;
@ -427,6 +429,7 @@ class CategoryDB extends DatabaseObject implements Category {
@Override
public Category copyCategory(Category category, DataTypeConflictHandler handler,
TaskMonitor monitor) {
// TODO: source archive handling is not documented
boolean isInSameArchive = (mgr == category.getDataTypeManager());
mgr.lock.acquire();
try {

View File

@ -45,11 +45,13 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
/**
* Constructor for a composite data type (structure or union).
* @param dataMgr the data type manager containing this data type.
* @param cache DataTypeDB object cache
*
* @param dataMgr the data type manager containing this data type.
* @param cache DataTypeDB object cache
* @param compositeAdapter the database adapter for this data type.
* @param componentAdapter the database adapter for the components of this data type.
* @param record the database record for this data type.
* @param componentAdapter the database adapter for the components of this data
* type.
* @param record the database record for this data type.
*/
CompositeDB(DataTypeManagerDB dataMgr, DBObjectCache<DataTypeDB> cache,
CompositeDBAdapter compositeAdapter, ComponentDBAdapter componentAdapter,
@ -61,19 +63,21 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
}
/**
* Perform initialization of instance fields during instantiation
* or instance refresh
* Perform initialization of instance fields during instantiation or instance
* refresh
*/
protected abstract void initialize();
/**
* Get the preferred length for a new component. For Unions and internally aligned
* structures the preferred component length for a fixed-length dataType will be the
* length of that dataType. Otherwise the length returned will be no larger than the
* specified length.
* Get the preferred length for a new component. For Unions and internally
* aligned structures the preferred component length for a fixed-length dataType
* will be the length of that dataType. Otherwise the length returned will be no
* larger than the specified length.
*
* @param dataType new component datatype
* @param length constrained length or -1 to force use of dataType size. Dynamic types
* such as string must have a positive length specified.
* @param length constrained length or -1 to force use of dataType size.
* Dynamic types such as string must have a positive length
* specified.
* @return preferred component length
*/
protected int getPreferredComponentLength(DataType dataType, int length) {
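Restated as a rough sketch of the rule in the javadoc above (a paraphrase, not the actual implementation): unions and internally aligned structures keep a fixed-length type's own size, while other cases are capped by the requested length, with -1 forcing the type's size.

    // Rough paraphrase of the documented rule; not the real implementation.
    static int preferredLength(int dataTypeLength, int requestedLength, boolean alignedOrUnion) {
        if (alignedOrUnion || requestedLength <= 0) {
            return dataTypeLength; // fixed-length type keeps its own size
        }
        return Math.min(dataTypeLength, requestedLength); // no larger than the specified length
    }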
@ -106,12 +110,13 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
/**
* Handle replacement of datatype which may impact bitfield datatype.
*
* @param bitfieldComponent bitfield component
* @param oldDt affected datatype which has been removed or replaced
* @param newDt replacement datatype
* @param true if bitfield component was modified
* @throws InvalidDataTypeException if bitfield was based upon oldDt but new datatype is
* invalid for a bitfield
* @param oldDt affected datatype which has been removed or replaced
* @param newDt replacement datatype
	 * @return true if the bitfield component was modified
* @throws InvalidDataTypeException if bitfield was based upon oldDt but new
* datatype is invalid for a bitfield
*/
protected boolean updateBitFieldDataType(DataTypeComponentDB bitfieldComponent, DataType oldDt,
DataType newDt) throws InvalidDataTypeException {
@ -252,24 +257,36 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
}
/**
* This method throws an exception if the indicated data type is an ancestor
* of this data type. In other words, the specified data type has a component
* or sub-component containing this data type.
* This method throws an exception if the indicated data type is an ancestor of
* this data type. In other words, the specified data type has a component or
* sub-component containing this data type.
*
* @param dataType the data type
* @throws IllegalArgumentException if the data type is an ancestor of this
* data type.
* @throws DataTypeDependencyException if the data type is an ancestor of this
* data type.
*/
protected void checkAncestry(DataType dataType) {
protected void checkAncestry(DataType dataType) throws DataTypeDependencyException {
if (this.equals(dataType)) {
throw new IllegalArgumentException(
throw new DataTypeDependencyException(
"Data type " + getDisplayName() + " can't contain itself.");
}
else if (DataTypeUtilities.isSecondPartOfFirst(dataType, this)) {
throw new IllegalArgumentException("Data type " + dataType.getDisplayName() + " has " +
getDisplayName() + " within it.");
throw new DataTypeDependencyException("Data type " + dataType.getDisplayName() +
" has " + getDisplayName() + " within it.");
}
}
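checkAncestry is what turns a would-be containment cycle into a DataTypeDependencyException instead of silently corrupting the hierarchy. A hedged illustration using the impl classes from ghidra.program.model.data (the DB-backed public add methods wrap the dependency failure as IllegalArgumentException, as shown later in this diff):

    static void ancestryDemo() {
        StructureDataType a = new StructureDataType("A", 0);
        StructureDataType b = new StructureDataType("B", 0);
        b.add(IntegerDataType.dataType); // give B a defined component
        a.add(b);                        // fine: B does not contain A
        try {
            b.add(a);                    // would form a cycle: A already has B within it
        }
        catch (IllegalArgumentException e) {
            // the containment cycle is rejected rather than stored
        }
    }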
protected DataType doCheckedResolve(DataType dt, DataTypeConflictHandler handler)
throws DataTypeDependencyException {
if (dt instanceof Pointer) {
pointerPostResolveRequired = true;
return resolve(((Pointer) dt).newPointer(DataType.DEFAULT));
}
dt = resolve(dt, handler);
checkAncestry(dt);
return dt;
}
@Override
protected void doSetNameRecord(String name) throws IOException {
record.setString(CompositeDBAdapter.COMPOSITE_NAME_COL, name);
@ -277,8 +294,9 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
}
/**
* This method throws an exception if the indicated data type is not
* a valid data type for a component of this composite data type.
* This method throws an exception if the indicated data type is not a valid
* data type for a component of this composite data type.
*
* @param dataType the data type to be checked.
* @throws IllegalArgumentException if the data type is invalid.
*/
@ -576,6 +594,30 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
}
}
@Override
public abstract DataTypeComponentDB[] getDefinedComponents();
@Override
protected void postPointerResolve(DataType definitionDt, DataTypeConflictHandler handler) {
Composite composite = (Composite) definitionDt;
DataTypeComponent[] definedComponents = composite.getDefinedComponents();
DataTypeComponentDB[] myDefinedComponents = getDefinedComponents();
if (definedComponents.length != myDefinedComponents.length) {
throw new IllegalArgumentException("mismatched definition datatype");
}
for (int i = 0; i < definedComponents.length; i++) {
DataTypeComponent dtc = definedComponents[i];
DataType dt = dtc.getDataType();
if (dt instanceof Pointer) {
DataTypeComponentDB myDtc = myDefinedComponents[i];
myDtc.getDataType().removeParent(this);
dt = dataMgr.resolve(dt, handler);
myDtc.setDataType(dt);
dt.addParent(this);
}
}
}
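Together with doCheckedResolve above, this is the deferred pointer resolution the commit adds: while a composite is being resolved, each pointer component is first stood in as a pointer to DataType.DEFAULT, and only after the composite exists in the manager is the pointer re-resolved to its real target here. A minimal standalone sketch of that ordering (hypothetical queue, not the Ghidra API):

    // Hypothetical two-phase resolve; illustrates the ordering only.
    static final java.util.Deque<Runnable> postResolveQueue = new java.util.ArrayDeque<>();

    static String resolveComponentType(String type) {
        if (type.endsWith("*")) {
            // Phase 1: record the real target for later and return a placeholder pointer,
            // so a self-referential pointer cannot recurse into an unfinished composite.
            postResolveQueue.add(() -> System.out.println("phase 2: re-resolve " + type));
            return "DEFAULT *";
        }
        return type; // non-pointer components resolve immediately
    }

    static void flushPostResolve() {
        while (!postResolveQueue.isEmpty()) {
            postResolveQueue.poll().run(); // runs once the owning composite exists
        }
    }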
/**
* Notification that this composite data type's alignment has changed.
*/
@ -649,10 +691,12 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
}
/**
* Adjusts the internal alignment of components within this composite based on the current
* settings of the internal alignment, packing, alignment type and minimum alignment value.
* This method should be called whenever any of the above settings are changed or whenever
* a components data type is changed or a component is added or removed.
* Adjusts the internal alignment of components within this composite based on
* the current settings of the internal alignment, packing, alignment type and
* minimum alignment value. This method should be called whenever any of the
	 * above settings are changed or whenever a component's data type is changed or a
* component is added or removed.
*
* @param notify
*/
protected abstract void adjustInternalAlignment(boolean notify);
@ -665,11 +709,14 @@ abstract class CompositeDB extends DataTypeDB implements Composite {
/**
* Dump all components for use in {@link #toString()} representation.
*
* @param buffer string buffer
* @param pad padding to be used with each component output line
* @param pad padding to be used with each component output line
*/
protected void dumpComponents(StringBuilder buffer, String pad) {
for (DataTypeComponent dtc : getComponents()) {
// limit output of filler components for unaligned structures
DataTypeComponent[] components = getDefinedComponents();
for (DataTypeComponent dtc : components) {
DataType dataType = dtc.getDataType();
buffer.append(pad + dtc.getOffset());
buffer.append(pad + dataType.getName());

View File

@ -128,6 +128,9 @@ class CompositeDBAdapterV2V3 extends CompositeDBAdapter {
if (readOnly) {
throw new ReadOnlyException();
}
		if (internalAlignment != UNALIGNED) {
length = 0; // aligned structures always start empty
}
long tableKey = compositeTable.getKey();
// if (tableKey <= DataManager.VOID_DATATYPE_ID) {
// tableKey = DataManager.VOID_DATATYPE_ID +1;

View File

@ -30,7 +30,6 @@ import docking.framework.DockingApplicationConfiguration;
import docking.widgets.label.GDLabel;
import ghidra.GhidraApplicationLayout;
import ghidra.GhidraLaunchable;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.framework.Application;
import ghidra.framework.ApplicationConfiguration;
import ghidra.program.model.data.*;
@ -472,8 +471,7 @@ public class DataTypeArchiveTransformer implements GhidraLaunchable {
}
private static DataTypeComponent getNamedComponent(Composite composite, String fieldName) {
DataTypeComponent[] components = composite.getComponents();
for (DataTypeComponent dataTypeComponent : components) {
for (DataTypeComponent dataTypeComponent : composite.getDefinedComponents()) {
if (fieldName.equals(dataTypeComponent.getFieldName())) {
return dataTypeComponent; // found match so return it.
}

View File

@ -23,7 +23,6 @@ import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import db.Record;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.Settings;
import ghidra.docking.settings.SettingsDefinition;
import ghidra.program.database.DBObjectCache;
@ -46,6 +45,7 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
private volatile Settings defaultSettings;
private final static SettingsDefinition[] EMPTY_DEFINITIONS = new SettingsDefinition[0];
protected boolean resolving;
protected boolean pointerPostResolveRequired;
protected Lock lock;
private volatile String name;
private volatile Category category;
@ -64,41 +64,46 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
/**
* Subclasses implement this to either read the name from the database record or compute
* if it is a derived name such as a pointer or array. Implementers can assume that
* the database lock will be acquired when this method is called.
* Subclasses implement this to either read the name from the database record or
* compute if it is a derived name such as a pointer or array. Implementers can
* assume that the database lock will be acquired when this method is called.
*/
protected abstract String doGetName();
/**
* Subclasses implement this to read the category path from the database record.Implementers can assume that
* the database lock will be acquired when this method is called.
* Subclasses implement this to read the category path from the database
	 * record. Implementers can assume that the database lock will be acquired when
* this method is called.
*/
protected abstract long doGetCategoryID();
/**
* Subclasses implement this to update the category path ID to the database. Implementers can assume that
* the database lock will be acquired when this method is called.
* Subclasses implement this to update the category path ID to the database.
* Implementers can assume that the database lock will be acquired when this
* method is called.
*/
protected abstract void doSetCategoryPathRecord(long categoryID) throws IOException;
/**
* Subclasses implement this to update the to the database. Implementers can assume that
* the database lock will be acquired when this method is called.
	 * Subclasses implement this to update the name record in the database. Implementers can
* assume that the database lock will be acquired when this method is called.
*
* @param newName new data type name
*/
protected abstract void doSetNameRecord(String newName)
throws IOException, InvalidNameException;
/**
* Subclasses implement this to read the source archive id from the record. Implementers can assume that
* the database lock will be acquired when this method is called.
* Subclasses implement this to read the source archive id from the record.
* Implementers can assume that the database lock will be acquired when this
* method is called.
*/
protected abstract UniversalID getSourceArchiveID();
/**
* Subclasses implement this to update the source archive id from the record. Implementers can assume that
* the database lock will be acquired when this method is called.
* Subclasses implement this to update the source archive id from the record.
* Implementers can assume that the database lock will be acquired when this
* method is called.
*/
protected abstract void setSourceArchiveID(UniversalID id);
@ -145,9 +150,9 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
/**
* Get the current name without refresh.
* This is intended to be used for event generation when an old-name
* is needed.
* Get the current name without refresh. This is intended to be used for event
* generation when an old-name is needed.
*
* @return old name
*/
protected final String getOldName() {
@ -177,13 +182,12 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
/**
* Set the data in the form of the appropriate Object for
* this DataType.
* Set the data in the form of the appropriate Object for this DataType.
*
* @param buf the data buffer.
* @param buf the data buffer.
* @param settings the display settings for the current value.
* @param length the number of bytes to set the value from.
* @param value the new value to set object
* @param length the number of bytes to set the value from.
* @param value the new value to set object
*/
public void setValue(MemBuffer buf, Settings settings, int length, Object value) {
@ -219,9 +223,7 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
*/
@Override
public void stateChanged(ChangeEvent e) {
if (dataMgr != null) {
dataMgr.dataTypeChanged(this);
}
dataMgr.dataTypeChanged(this);
}
/**
@ -266,21 +268,26 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
protected DataType resolve(DataType dt) {
return resolve(dt, null);
return resolve(dt, dataMgr.getCurrentConflictHandler());
}
protected DataType resolve(DataType dt, DataTypeConflictHandler handler) {
if (dataMgr != null) {
resolving = true;
// complex types should keep equivalence checks to a minimum while resolving
		// and when post-resolve is required for pointers
resolving = true;
try {
dt = dataMgr.resolve(dt, handler);
}
finally {
resolving = false;
}
return dt;
}
/**
* @see ghidra.program.model.data.DataType#getCategoryPath()
*/
protected void postPointerResolve(DataType definitionDt, DataTypeConflictHandler handler) {
throw new UnsupportedOperationException("post-resolve of pointers not implemented");
}
@Override
public CategoryPath getCategoryPath() {
validate(lock);
@ -374,9 +381,6 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
}
/**
* @see ghidra.program.model.data.DataType#setNameAndCategory(ghidra.program.model.data.CategoryPath, java.lang.String)
*/
@Override
public void setNameAndCategory(CategoryPath path, String name)
throws InvalidNameException, DuplicateNameException {
@ -388,12 +392,14 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
"DataType named " + name + " already exists in category " + path.getPath());
}
// generate a name that would not cause a duplicate in either the current path or
// the new path. Use the new name if possible.
		// generate a name that would not cause a duplicate in either the current path
		// or the new path. Use the new name if possible.
String uniqueName = dataMgr.getUniqueName(path, getCategoryPath(), name);
doSetName(uniqueName);
// set the path - this is guaranteed to work since we make a name that won't conflict
// set the path - this is guaranteed to work since we make a name that won't
// conflict
doSetCategoryPath(path);
// now, if necessary, rename it to the desired name - guaranteed to work since
@ -409,15 +415,13 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
/**
* Updates the path for this datatype
*
* @param dt the dataType whose path has changed.
*/
protected void updatePath(DataTypeDB dt) {
// no-op
}
/**
* @see ghidra.program.model.data.DataType#addParent(ghidra.program.model.data.DataType)
*/
@Override
public void addParent(DataType dt) {
if (dt instanceof DataTypeDB && dt.getDataTypeManager() == dataMgr) {
@ -425,9 +429,6 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
}
/**
* @see ghidra.program.model.data.DataType#removeParent(ghidra.program.model.data.DataType)
*/
@Override
public void removeParent(DataType dt) {
if (dt instanceof DataTypeDB && dt.getDataTypeManager() == dataMgr) {
@ -455,9 +456,6 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
}
/**
* @see ghidra.program.model.data.DataType#getParents()
*/
@Override
public DataType[] getParents() {
List<DataType> parents = dataMgr.getParentDataTypes(key);
@ -465,9 +463,6 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
return parents.toArray(array);
}
/**
* @see ghidra.program.model.data.DataType#dependsOn(ghidra.program.model.data.DataType)
*/
@Override
public boolean dependsOn(DataType dt) {
return false;
@ -522,9 +517,10 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
/**
* Sets a String briefly describing this DataType.
* <br>If a data type that extends this class wants to allow the description to be changed,
* then it must override this method.
* Sets a String briefly describing this DataType. <br>
* If a data type that extends this class wants to allow the description to be
* changed, then it must override this method.
*
* @param description a one-liner describing this DataType.
*/
@Override
@ -533,11 +529,15 @@ abstract class DataTypeDB extends DatabaseObject implements DataType, ChangeList
}
/**
* setUniversalID is a package level method that allows you to change a data type's
* universal ID. This is only intended to be used when transforming a newly parsed data type
* archive so that it can be used as a replacement of the archive from a previous software release.
* @param oldUniversalID the old universal ID value that the user is already referencing
* with their data types. This is the universal ID that we want the new data type to be known by.
* setUniversalID is a package level method that allows you to change a data
* type's universal ID. This is only intended to be used when transforming a
* newly parsed data type archive so that it can be used as a replacement of the
* archive from a previous software release.
*
* @param oldUniversalID the old universal ID value that the user is already
* referencing with their data types. This is the
* universal ID that we want the new data type to be known
* by.
*/
abstract void setUniversalID(UniversalID oldUniversalID);

View File

@ -18,7 +18,6 @@ package ghidra.program.database.data;
import java.util.*;
import java.util.regex.Pattern;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.Settings;
import ghidra.program.model.address.GlobalNamespace;
import ghidra.program.model.data.*;
@ -157,30 +156,37 @@ public class DataTypeUtilities {
if (firstDataType.equals(secondDataType)) {
return true;
}
else if (firstDataType instanceof Array) {
if (firstDataType instanceof Array) {
DataType elementDataType = ((Array) firstDataType).getDataType();
return isSecondPartOfFirst(elementDataType, secondDataType);
}
else if (firstDataType instanceof TypeDef) {
if (firstDataType instanceof TypeDef) {
DataType innerDataType = ((TypeDef) firstDataType).getDataType();
return isSecondPartOfFirst(innerDataType, secondDataType);
}
else if (firstDataType instanceof Composite) {
if (firstDataType instanceof Composite) {
Composite compositeDataType = (Composite) firstDataType;
int numComponents = compositeDataType.getNumComponents();
for (int i = 0; i < numComponents; i++) {
DataTypeComponent dtc = compositeDataType.getComponent(i);
for (DataTypeComponent dtc : compositeDataType.getDefinedComponents()) {
DataType dataTypeToCheck = dtc.getDataType();
if (isSecondPartOfFirst(dataTypeToCheck, secondDataType)) {
return true;
}
}
if (firstDataType instanceof Structure) {
DataTypeComponent flexDtc = ((Structure) firstDataType).getFlexibleArrayComponent();
if (flexDtc != null && isSecondPartOfFirst(flexDtc.getDataType(), secondDataType)) {
return true;
}
}
}
return false;
}
/**
* Returns true if the two dataTypes have the same sourceArchive and the same UniversalID.
* Returns true if the two dataTypes have the same sourceArchive and the same UniversalID
* @param dataType1 first data type
* @param dataType2 second data type
* @return true if types correspond to the same type from a source archive
*/
public static boolean isSameDataType(DataType dataType1, DataType dataType2) {
UniversalID id1 = dataType1.getUniversalID();
@ -203,10 +209,15 @@ public class DataTypeUtilities {
/**
* Returns true if the two dataTypes have the same sourceArchive and the same UniversalID OR
* are equivalent
* @param dataType1 first data type (if invoked by DB object or manager, this argument
* must correspond to the DataTypeDB).
* @param dataType2 second data type
* @return true if types correspond to the same type from a source archive
	 * or they are equivalent, otherwise false
*/
public static boolean isSameOrEquivalentDataType(DataType dataType1, DataType dataType2) {
// if they contain datatypes that have same ids, then they represent the same dataType
if (DataTypeUtilities.isSameDataType(dataType1, dataType2)) {
if (isSameDataType(dataType1, dataType2)) {
return true;
}
// otherwise, check if they are equivalent

View File

@ -46,7 +46,7 @@ class EnumDB extends DataTypeDB implements Enum {
private List<BitGroup> bitGroups;
EnumDB(DataTypeManagerDB dataMgr, DBObjectCache<DataTypeDB> cache, EnumDBAdapter adapter,
EnumValueDBAdapter valueAdapter, Record record) throws IOException {
EnumValueDBAdapter valueAdapter, Record record) {
super(dataMgr, cache, record);
this.adapter = adapter;
this.valueAdapter = valueAdapter;
@ -86,8 +86,8 @@ class EnumDB extends DataTypeDB implements Enum {
long[] ids = valueAdapter.getValueIdsInEnum(key);
for (int i = 0; i < ids.length; i++) {
Record rec = valueAdapter.getRecord(ids[i]);
for (long id : ids) {
Record rec = valueAdapter.getRecord(id);
String valueName = rec.getString(EnumValueDBAdapter.ENUMVAL_NAME_COL);
long value = rec.getLongValue(EnumValueDBAdapter.ENUMVAL_VALUE_COL);
addToCache(valueName, value);
@ -252,10 +252,10 @@ class EnumDB extends DataTypeDB implements Enum {
long[] ids = valueAdapter.getValueIdsInEnum(key);
for (int i = 0; i < ids.length; i++) {
Record rec = valueAdapter.getRecord(ids[i]);
for (long id : ids) {
Record rec = valueAdapter.getRecord(id);
if (valueName.equals(rec.getString(EnumValueDBAdapter.ENUMVAL_NAME_COL))) {
valueAdapter.removeRecord(ids[i]);
valueAdapter.removeRecord(id);
break;
}
}
@ -285,8 +285,8 @@ class EnumDB extends DataTypeDB implements Enum {
valueMap = new HashMap<>();
long[] ids = valueAdapter.getValueIdsInEnum(key);
for (int i = 0; i < ids.length; i++) {
valueAdapter.removeRecord(ids[i]);
for (long id : ids) {
valueAdapter.removeRecord(id);
}
int oldLength = getLength();
@ -298,11 +298,11 @@ class EnumDB extends DataTypeDB implements Enum {
}
String[] names = enumm.getNames();
for (int i = 0; i < names.length; i++) {
long value = enumm.getValue(names[i]);
valueAdapter.createRecord(key, names[i], value);
for (String name2 : names) {
long value = enumm.getValue(name2);
valueAdapter.createRecord(key, name2, value);
adapter.updateRecord(record, true);
addToCache(names[i], value);
addToCache(name2, value);
}
if (oldLength != newLength) {

View File

@ -177,15 +177,24 @@ class FunctionDefinitionDB extends DataTypeDB implements FunctionDefinition {
}
private void doReplaceWith(FunctionDefinition functionDefinition) {
setArguments(functionDefinition.getArguments());
lock.acquire();
try {
setReturnType(functionDefinition.getReturnType());
checkDeleted();
setArguments(functionDefinition.getArguments());
try {
setReturnType(functionDefinition.getReturnType());
}
catch (IllegalArgumentException e) {
setReturnType(DEFAULT);
}
setVarArgs(functionDefinition.hasVarArgs());
setGenericCallingConvention(functionDefinition.getGenericCallingConvention());
}
catch (IllegalArgumentException e) {
setReturnType(DEFAULT);
finally {
lock.release();
}
setVarArgs(functionDefinition.hasVarArgs());
setGenericCallingConvention(functionDefinition.getGenericCallingConvention());
}
@Override
@ -251,7 +260,7 @@ class FunctionDefinitionDB extends DataTypeDB implements FunctionDefinition {
for (int i = 0; i < args.length; i++) {
DataType type =
ParameterDefinitionImpl.validateDataType(args[i].getDataType(), dataMgr, false);
DataType resolvedDt = resolve(type);
DataType resolvedDt = resolve(type, dataMgr.getCurrentConflictHandler());
paramAdapter.createRecord(dataMgr.getID(resolvedDt), key, i, args[i].getName(),
args[i].getComment(), args[i].getLength());
resolvedDt.addParent(this);
@ -278,7 +287,7 @@ class FunctionDefinitionDB extends DataTypeDB implements FunctionDefinition {
if (type == null) {
type = DataType.DEFAULT;
}
DataType resolvedDt = resolve(type);
DataType resolvedDt = resolve(type, dataMgr.getCurrentConflictHandler());
record.setLongValue(FunctionDefinitionDBAdapter.FUNCTION_DEF_RETURN_ID_COL,
dataMgr.getID(resolvedDt));
funDefAdapter.updateRecord(record, true);
@ -332,22 +341,35 @@ class FunctionDefinitionDB extends DataTypeDB implements FunctionDefinition {
}
@Override
public boolean isEquivalent(DataType dt) {
if (dt == this) {
public boolean isEquivalent(DataType dataType) {
if (dataType == this) {
return true;
}
if (!(dt instanceof FunctionDefinition)) {
if (!(dataType instanceof FunctionDefinition)) {
return false;
}
checkIsValid();
if (resolving) {
if (dt.getUniversalID().equals(getUniversalID())) {
if (resolving) { // actively resolving children
if (dataType.getUniversalID().equals(getUniversalID())) {
return true;
}
return DataTypeUtilities.equalsIgnoreConflict(getPathName(), dt.getPathName());
return DataTypeUtilities.equalsIgnoreConflict(getPathName(), dataType.getPathName());
}
return isEquivalentSignature((FunctionSignature) dt);
Boolean isEquivalent = dataMgr.getCachedEquivalence(this, dataType);
if (isEquivalent != null) {
return isEquivalent;
}
try {
isEquivalent = isEquivalentSignature((FunctionSignature) dataType);
}
finally {
dataMgr.putCachedEquivalence(this, dataType, isEquivalent);
}
return isEquivalent;
}
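The getCachedEquivalence/putCachedEquivalence calls above are the equivalence cache this commit adds so a large resolve does not recompute the same deep comparison repeatedly. A minimal sketch of such a cache, assuming a pair key (the real DataTypeManagerDB storage may differ):

    // Hypothetical pair-keyed cache; the real one lives in DataTypeManagerDB.
    static final class EquivalenceCache {
        private final java.util.Map<String, Boolean> cache = new java.util.HashMap<>();

        private static String key(long id1, long id2) {
            return id1 + ":" + id2; // ordered pair: (a,b) and (b,a) cache independently
        }

        Boolean getCachedEquivalence(long id1, long id2) {
            return cache.get(key(id1, id2)); // null means not yet computed
        }

        void putCachedEquivalence(long id1, long id2, Boolean isEquivalent) {
            cache.put(key(id1, id2), isEquivalent);
        }
    }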
@Override
@ -360,15 +382,15 @@ class FunctionDefinitionDB extends DataTypeDB implements FunctionDefinition {
if ((DataTypeUtilities.equalsIgnoreConflict(signature.getName(), getName())) &&
((comment == null && myComment == null) ||
(comment != null && comment.equals(myComment))) &&
(DataTypeUtilities.isSameOrEquivalentDataType(signature.getReturnType(),
getReturnType())) &&
(DataTypeUtilities.isSameOrEquivalentDataType(getReturnType(),
signature.getReturnType())) &&
(getGenericCallingConvention() == signature.getGenericCallingConvention()) &&
(hasVarArgs() == signature.hasVarArgs())) {
ParameterDefinition[] args = signature.getArguments();
ParameterDefinition[] thisArgs = this.getArguments();
if (args.length == thisArgs.length) {
for (int i = 0; i < args.length; i++) {
if (!args[i].isEquivalent(thisArgs[i])) {
if (!thisArgs[i].isEquivalent(args[i])) {
return false;
}
}
@ -450,7 +472,7 @@ class FunctionDefinitionDB extends DataTypeDB implements FunctionDefinition {
param.getDataType().removeParent(this);
paramAdapter.removeRecord(param.getKey());
}
DataType rdt = resolve(dt);
DataType rdt = resolve(dt, dataMgr.getCurrentConflictHandler());
rdt.addParent(this);
paramAdapter.createRecord(dataMgr.getID(rdt), key, ordinal, name, comment,
dt.getLength());
@ -652,4 +674,9 @@ class FunctionDefinitionDB extends DataTypeDB implements FunctionDefinition {
lock.release();
}
}
@Override
public String toString() {
return getPrototypeString(true);
}
}

View File

@ -40,14 +40,15 @@ class PointerDB extends DataTypeDB implements Pointer {
private String displayName;
/**
* <code>isEquivalentActive</code> is used to break cyclical recursion
* when performing an {@link #isEquivalent(DataType)} checks on pointers
* which must also check the base datatype equivelency.
	 * <code>isEquivalentActive</code> is used to break cyclical recursion when
	 * performing {@link #isEquivalent(DataType)} checks on pointers, which must
	 * also check the base datatype equivalency.
*/
private ThreadLocal<Boolean> isEquivalentActive = ThreadLocal.withInitial(() -> Boolean.FALSE);
/**
* Constructor
*
* @param dataMgr
* @param cache
* @param adapter
@ -307,6 +308,15 @@ class PointerDB extends DataTypeDB implements Pointer {
return false;
}
// TODO: The pointer deep-dive equivalence checking on the referenced datatype can
// cause types containing pointers (composites, functions) to conflict when in
// reality the referenced type simply has multiple implementations which differ.
// Although without doing this Ghidra may fail to resolve dependencies which differ
// from those already contained within a datatype manager.
// Ghidra's rigid datatype relationships prevent the flexibility to handle
// multiple implementations of a named datatype without inducing a conflicted
// datatype hierarchy.
if (isEquivalentActive.get()) {
return true;
}
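The isEquivalentActive guard above breaks the cycle that arises when a pointer's equivalence check recurses back into the same pointer through its base type. A stripped-down sketch of the pattern (illustrative class, not PointerDB itself):

    // Illustrative per-thread recursion guard, mirroring the pattern used above.
    static final class CyclicEquivalenceChecker {
        private final ThreadLocal<Boolean> active = ThreadLocal.withInitial(() -> Boolean.FALSE);

        boolean isEquivalent(Object other) {
            if (active.get()) {
                return true; // already comparing on this thread: short-circuit the cycle
            }
            active.set(Boolean.TRUE);
            try {
                return deepCompare(other);
            }
            finally {
                active.set(Boolean.FALSE);
            }
        }

        private boolean deepCompare(Object other) {
            return other != null; // placeholder for the real base-type comparison
        }
    }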
@ -359,11 +369,12 @@ class PointerDB extends DataTypeDB implements Pointer {
/**
* @see ghidra.program.model.data.DataType#setCategoryPath(ghidra.program.model.data.CategoryPath)
*
* Note: this does get called, but in a tricky way. If externally, someone calls
* setCategoryPath, nothing happens because it is overridden in this class to do nothing.
* However, if updatePath is called, then this method calls super.setCategoryPath which
* bypasses the "overriddenness" of setCategoryPath, resulting in this method getting called.
* Note: this does get called, but in a tricky way. If externally, someone
* calls setCategoryPath, nothing happens because it is overridden in this
* class to do nothing. However, if updatePath is called, then this method
* calls super.setCategoryPath which bypasses the "overriddenness" of
* setCategoryPath, resulting in this method getting called.
*
*/
@Override
protected void doSetCategoryPathRecord(long categoryID) throws IOException {

View File

@ -20,7 +20,7 @@ import java.util.Iterator;
import java.util.List;
import db.*;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.UniversalID;
import ghidra.util.exception.VersionException;
import ghidra.util.task.TaskMonitor;

View File

@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -16,8 +15,8 @@
*/
package ghidra.program.database.data;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.UniversalID;
import ghidra.util.exception.VersionException;

View File

@ -19,8 +19,8 @@ import java.io.IOException;
import java.util.*;
import db.*;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.SourceArchive;
import ghidra.util.UniversalID;
import ghidra.util.exception.VersionException;

View File

@ -18,11 +18,9 @@ package ghidra.program.database.data;
import java.io.IOException;
import db.Record;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.database.DBObjectCache;
import ghidra.program.database.DatabaseObject;
import ghidra.program.model.data.ArchiveType;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.*;
import ghidra.util.Lock;
import ghidra.util.UniversalID;

View File

@ -18,9 +18,7 @@ package ghidra.program.database.data;
import java.util.HashMap;
import java.util.Map;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.program.model.data.ArchiveType;
import ghidra.program.model.data.DataTypeManager;
import ghidra.program.model.data.*;
import ghidra.program.model.lang.CompilerSpec;
import ghidra.program.model.lang.CompilerSpecID;
import ghidra.util.UniversalID;

View File

@ -26,7 +26,6 @@ import ghidra.program.model.data.AlignedStructurePacker.StructurePackResult;
import ghidra.program.model.mem.MemBuffer;
import ghidra.util.Msg;
import ghidra.util.exception.AssertException;
import ghidra.util.exception.InvalidInputException;
/**
* Structure implementation for the Database.
@ -47,6 +46,7 @@ class StructureDB extends CompositeDB implements Structure {
/**
* Constructor
*
* @param dataMgr
* @param cache
* @param compositeAdapter
@ -96,73 +96,59 @@ class StructureDB extends CompositeDB implements Structure {
}
@Override
public DataTypeComponent add(DataType dataType, int length, String name, String comment) {
return doAdd(dataType, length, false, name, comment, true);
public DataTypeComponent add(DataType dataType, int length, String name, String comment)
throws IllegalArgumentException {
try {
return doAdd(dataType, length, name, comment, true);
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
}
private DataTypeComponent doAdd(DataType dataType, int length, boolean isFlexibleArray,
String name, String comment, boolean alignAndNotify) {
private DataTypeComponent doAdd(DataType dataType, int length, String name, String comment,
boolean validateAlignAndNotify)
throws DataTypeDependencyException, IllegalArgumentException {
lock.acquire();
try {
checkDeleted();
validateDataType(dataType);
dataType = resolve(dataType);
checkAncestry(dataType);
if (validateAlignAndNotify) {
validateDataType(dataType);
dataType = resolve(dataType, null);
checkAncestry(dataType);
}
DataTypeComponentDB dtc = null;
try {
if (dataType == DataType.DEFAULT && !isFlexibleArray) {
if (dataType == DataType.DEFAULT) {
dtc = new DataTypeComponentDB(dataMgr, componentAdapter, this, key,
numComponents, structLength);
}
else {
int offset = structLength;
int ordinal = numComponents;
int componentLength;
if (isFlexibleArray) {
// assume trailing flexible array component
offset = -1;
ordinal = -1;
if (flexibleArrayComponent != null) {
flexibleArrayComponent.getDataType().removeParent(this);
componentAdapter.removeRecord(flexibleArrayComponent.getKey());
flexibleArrayComponent = null;
}
componentLength = 0;
}
else {
componentLength = getPreferredComponentLength(dataType, length);
}
int componentLength = getPreferredComponentLength(dataType, length);
Record rec = componentAdapter.createRecord(dataMgr.getResolvedID(dataType), key,
componentLength, ordinal, offset, name, comment);
componentLength, numComponents, structLength, name, comment);
dtc = new DataTypeComponentDB(dataMgr, componentAdapter, this, rec);
dataType.addParent(this);
if (isFlexibleArray) {
flexibleArrayComponent = dtc;
}
else {
components.add(dtc);
}
components.add(dtc);
}
if (!isFlexibleArray) {
int structureGrowth = dtc.getLength();
if (!isInternallyAligned() && length > 0) {
structureGrowth = length;
}
int structureGrowth = dtc.getLength();
if (!isInternallyAligned() && length > 0) {
structureGrowth = length;
}
++numComponents;
structLength += structureGrowth;
if (validateAlignAndNotify) {
record.setIntValue(CompositeDBAdapter.COMPOSITE_NUM_COMPONENTS_COL,
++numComponents);
structLength += structureGrowth;
numComponents);
record.setIntValue(CompositeDBAdapter.COMPOSITE_LENGTH_COL, structLength);
compositeAdapter.updateRecord(record, true);
}
if (alignAndNotify) {
adjustInternalAlignment(false);
notifySizeChanged();
}
@ -177,6 +163,60 @@ class StructureDB extends CompositeDB implements Structure {
}
}
private DataTypeComponent doAddFlexArray(DataType dataType, String name, String comment,
boolean validateAlignAndNotify)
throws DataTypeDependencyException, IllegalArgumentException {
lock.acquire();
try {
checkDeleted();
if (validateAlignAndNotify) {
validateDataType(dataType);
dataType = resolve(dataType, null);
if (isInvalidFlexArrayDataType(dataType)) {
throw new IllegalArgumentException(
"Unsupported flexType: " + dataType.getDisplayName());
}
checkAncestry(dataType);
}
DataTypeComponentDB dtc = null;
try {
int oldLength = structLength;
if (flexibleArrayComponent != null) {
flexibleArrayComponent.getDataType().removeParent(this);
componentAdapter.removeRecord(flexibleArrayComponent.getKey());
flexibleArrayComponent = null;
}
Record rec = componentAdapter.createRecord(dataMgr.getResolvedID(dataType), key, 0,
-1, -1, name, comment);
dtc = new DataTypeComponentDB(dataMgr, componentAdapter, this, rec);
dataType.addParent(this);
flexibleArrayComponent = dtc;
if (validateAlignAndNotify) {
adjustInternalAlignment(false);
if (oldLength != structLength) {
notifySizeChanged();
}
else {
dataMgr.dataTypeChanged(this);
}
}
}
catch (IOException e) {
dataMgr.dbError(e);
}
return dtc;
}
finally {
lock.release();
}
}
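doAddFlexArray backs the trailing flexible array member idiom: a zero-length component stored with ordinal and offset of -1. A hedged usage sketch mirroring struct pkt { int len; char payload[]; }, using only calls visible elsewhere in this diff (setFlexibleArrayComponent, getFlexibleArrayComponent); exact signatures are assumed:

    // Types come from ghidra.program.model.data; signatures are assumed from this diff.
    static Structure flexArrayExample() {
        StructureDataType pkt = new StructureDataType("pkt", 0);
        pkt.add(IntegerDataType.dataType, "len", null);
        pkt.setFlexibleArrayComponent(CharDataType.dataType, "payload", null);
        // The flexible array component reports ordinal/offset -1 and length 0.
        DataTypeComponent flex = pkt.getFlexibleArrayComponent();
        return pkt;
    }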
@Override
public void growStructure(int amount) {
lock.acquire();
@ -237,7 +277,7 @@ class StructureDB extends CompositeDB implements Structure {
if (idx > 0) {
DataTypeComponentDB existingDtc = components.get(idx);
if (existingDtc.isBitFieldComponent()) {
// must shift down to eliminate possible overlap with previous component
// must shift down to eliminate possible overlap with previous component
DataTypeComponentDB previousDtc = components.get(idx - 1);
if (previousDtc.getEndOffset() == existingDtc.getOffset()) {
shiftOffsets(idx, 0, 1);
@ -268,6 +308,9 @@ class StructureDB extends CompositeDB implements Structure {
notifySizeChanged();
return dtc;
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
catch (IOException e) {
dataMgr.dbError(e);
}
@ -642,9 +685,11 @@ class StructureDB extends CompositeDB implements Structure {
}
/**
* Create copy of structure for target dtm (source archive information is discarded).
* WARNING! copying unaligned structures which contain bitfields can produce
* invalid results when switching endianess due to the differences in packing order.
* Create copy of structure for target dtm (source archive information is
* discarded). WARNING! copying unaligned structures which contain bitfields can
	 * produce invalid results when switching endianness due to the differences in
* packing order.
*
* @param dtm target data type manager
* @return cloned structure
*/
@ -660,7 +705,9 @@ class StructureDB extends CompositeDB implements Structure {
/**
* Create cloned structure for target dtm preserving source archive information.
* WARNING! cloning unaligned structures which contain bitfields can produce
* invalid results when switching endianess due to the differences in packing order.
	 * invalid results when switching endianness due to the differences in packing
* order.
*
* @param dtm target data type manager
* @return cloned structure
*/
@ -746,12 +793,13 @@ class StructureDB extends CompositeDB implements Structure {
}
/**
* Backup from specified ordinal to the first component which contains
* the specified offset. For normal components the specified
* ordinal will be returned, however for bit-fields the ordinal of the first
* bit-field containing the specified offset will be returned.
* Backup from specified ordinal to the first component which contains the
* specified offset. For normal components the specified ordinal will be
* returned, however for bit-fields the ordinal of the first bit-field
* containing the specified offset will be returned.
*
* @param ordinal component ordinal
* @param offset offset within structure
* @param offset offset within structure
* @return index of first defined component containing specific offset.
*/
private int backupToFirstComponentContainingOffset(int index, int offset) {
@ -771,12 +819,13 @@ class StructureDB extends CompositeDB implements Structure {
}
/**
* Advance from specified ordinal to the last component which contains
* the specified offset. For normal components the specified
* ordinal will be returned, however for bit-fields the ordinal of the last
* bit-field containing the specified offset will be returned.
* Advance from specified ordinal to the last component which contains the
* specified offset. For normal components the specified ordinal will be
* returned, however for bit-fields the ordinal of the last bit-field containing
* the specified offset will be returned.
*
* @param ordinal component ordinal
* @param offset offset within structure
* @param offset offset within structure
* @return index of last defined component containing specific offset.
*/
private int advanceToLastComponentContainingOffset(int index, int offset) {
@ -887,11 +936,11 @@ class StructureDB extends CompositeDB implements Structure {
}
@Override
public DataTypeComponent[] getDefinedComponents() {
public DataTypeComponentDB[] getDefinedComponents() {
lock.acquire();
try {
checkIsValid();
return components.toArray(new DataTypeComponent[components.size()]);
return components.toArray(new DataTypeComponentDB[components.size()]);
}
finally {
lock.release();
@ -899,13 +948,14 @@ class StructureDB extends CompositeDB implements Structure {
}
@Override
public final DataTypeComponent insertAtOffset(int offset, DataType dataType, int length) {
public final DataTypeComponent insertAtOffset(int offset, DataType dataType, int length)
throws IllegalArgumentException {
return insertAtOffset(offset, dataType, length, null, null);
}
@Override
public DataTypeComponent insertAtOffset(int offset, DataType dataType, int length, String name,
String comment) {
String comment) throws IllegalArgumentException {
if (offset < 0) {
throw new IllegalArgumentException("Offset cannot be negative.");
@ -978,6 +1028,9 @@ class StructureDB extends CompositeDB implements Structure {
notifySizeChanged();
return dtc;
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
catch (IOException e) {
dataMgr.dbError(e);
}
@ -989,7 +1042,7 @@ class StructureDB extends CompositeDB implements Structure {
@Override
public DataTypeComponent replace(int ordinal, DataType dataType, int length, String name,
String comment) {
String comment) throws IllegalArgumentException {
lock.acquire();
try {
checkDeleted();
@ -1021,19 +1074,23 @@ class StructureDB extends CompositeDB implements Structure {
adjustInternalAlignment(true);
return replaceComponent;
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
finally {
lock.release();
}
}
@Override
public final DataTypeComponent replace(int ordinal, DataType dataType, int length) {
public final DataTypeComponent replace(int ordinal, DataType dataType, int length)
throws IllegalArgumentException {
return replace(ordinal, dataType, length, null, null);
}
@Override
public DataTypeComponent replaceAtOffset(int offset, DataType dataType, int length, String name,
String comment) {
String comment) throws IllegalArgumentException {
if (offset < 0) {
throw new IllegalArgumentException("Offset cannot be negative.");
}
@ -1071,6 +1128,9 @@ class StructureDB extends CompositeDB implements Structure {
adjustInternalAlignment(true);
return replaceComponent;
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
finally {
lock.release();
}
@ -1079,12 +1139,14 @@ class StructureDB extends CompositeDB implements Structure {
/**
* Replaces the internal components of this structure with components of the
* given structure.
*
* @param dataType the structure to get the component information from.
* @throws IllegalArgumentException if any of the component data types
* are not allowed to replace a component in this composite data type.
* For example, suppose dt1 contains dt2. Therefore it is not valid
* to replace a dt2 component with dt1 since this would cause a cyclic
* dependency.
* @throws IllegalArgumentException if any of the component data types are not
* allowed to replace a component in this
* composite data type. For example, suppose
* dt1 contains dt2. Therefore it is not valid
* to replace a dt2 component with dt1 since
* this would cause a cyclic dependency.
* @see ghidra.program.database.data.DataTypeDB#replaceWith(ghidra.program.model.data.DataType)
*/
@Override
@ -1092,92 +1154,130 @@ class StructureDB extends CompositeDB implements Structure {
if (!(dataType instanceof Structure)) {
throw new IllegalArgumentException();
}
doReplaceWith((Structure) dataType, true, null);
}
/**
*
* @param struct
* @param notify
* @param handler
*/
void doReplaceWith(Structure struct, boolean notify, DataTypeConflictHandler handler) {
lock.acquire();
boolean isResolveCacheOwner = dataMgr.activateResolveCache();
try {
checkDeleted();
int oldLength = structLength;
int oldMinAlignment = getMinimumAlignment();
for (int i = 0; i < components.size(); i++) {
DataTypeComponentDB dtc = components.get(i);
dtc.getDataType().removeParent(this);
componentAdapter.removeRecord(dtc.getKey());
}
components.clear();
numComponents = 0;
structLength = 0;
if (flexibleArrayComponent != null) {
flexibleArrayComponent.getDataType().removeParent(this);
componentAdapter.removeRecord(flexibleArrayComponent.getKey());
flexibleArrayComponent = null;
}
setAlignment(struct, false);
if (struct.isInternallyAligned()) {
doReplaceWithAligned(struct);
}
else {
doReplaceWithUnaligned(struct);
}
DataTypeComponent flexComponent = struct.getFlexibleArrayComponent();
if (flexComponent != null) {
setFlexibleArrayComponent(flexComponent.getDataType(), flexComponent.getFieldName(),
flexComponent.getComment());
}
record.setIntValue(CompositeDBAdapter.COMPOSITE_NUM_COMPONENTS_COL, numComponents);
record.setIntValue(CompositeDBAdapter.COMPOSITE_LENGTH_COL, structLength);
compositeAdapter.updateRecord(record, false);
if (notify) {
if (oldMinAlignment != getMinimumAlignment()) {
notifyAlignmentChanged();
}
else if (oldLength != structLength) {
notifySizeChanged();
}
else {
dataMgr.dataTypeChanged(this);
}
}
doReplaceWith((Structure) dataType, true, dataMgr.getCurrentConflictHandler());
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
catch (IOException e) {
dataMgr.dbError(e);
}
finally {
if (isResolveCacheOwner) {
dataMgr.flushResolveCacheAndClearQueue(null);
}
lock.release();
}
}
private void doReplaceWithAligned(Structure struct) {
/**
	 * Perform component replacement. If a pointer component requires deferred
	 * resolution, this composite is queued for pointer post-resolve.
	 *
	 * @param struct replacement structure definition
	 * @param notify true to issue change notifications
	 * @param handler conflict handler used when resolving component datatypes
* @throws DataTypeDependencyException
* @throws IOException
*/
void doReplaceWith(Structure struct, boolean notify, DataTypeConflictHandler handler)
throws DataTypeDependencyException, IOException {
// pre-resolved component types to catch dependency issues early
DataTypeComponent flexComponent = struct.getFlexibleArrayComponent();
DataTypeComponent[] otherComponents = struct.getDefinedComponents();
DataType[] resolvedDts = new DataType[otherComponents.length];
for (int i = 0; i < otherComponents.length; i++) {
resolvedDts[i] = doCheckedResolve(otherComponents[i].getDataType(), handler);
}
DataType resolvedFlexDt = null;
if (flexComponent != null) {
resolvedFlexDt = doCheckedResolve(flexComponent.getDataType(), handler);
if (isInvalidFlexArrayDataType(resolvedFlexDt)) {
throw new IllegalArgumentException(
"Unsupported flexType: " + resolvedFlexDt.getDisplayName());
}
}
int oldLength = structLength;
int oldMinAlignment = getMinimumAlignment();
for (int i = 0; i < components.size(); i++) {
DataTypeComponentDB dtc = components.get(i);
dtc.getDataType().removeParent(this);
componentAdapter.removeRecord(dtc.getKey());
}
components.clear();
numComponents = 0;
structLength = 0;
if (flexibleArrayComponent != null) {
flexibleArrayComponent.getDataType().removeParent(this);
componentAdapter.removeRecord(flexibleArrayComponent.getKey());
flexibleArrayComponent = null;
}
setAlignment(struct, false);
if (struct.isInternallyAligned()) {
doReplaceWithAligned(struct, resolvedDts);
}
else {
doReplaceWithUnaligned(struct, resolvedDts);
}
if (flexComponent != null) {
doAddFlexArray(resolvedFlexDt, flexComponent.getFieldName(), flexComponent.getComment(),
false);
}
record.setIntValue(CompositeDBAdapter.COMPOSITE_NUM_COMPONENTS_COL, numComponents);
record.setIntValue(CompositeDBAdapter.COMPOSITE_LENGTH_COL, structLength);
compositeAdapter.updateRecord(record, false);
adjustInternalAlignment(false);
if (notify) {
if (oldMinAlignment != getMinimumAlignment()) {
notifyAlignmentChanged();
}
else if (oldLength != structLength) {
notifySizeChanged();
}
else {
dataMgr.dataTypeChanged(this);
}
}
if (pointerPostResolveRequired) {
dataMgr.queuePostResolve(this, struct);
}
}
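
The reworked method resolves every incoming component (and the flex-array type) before it clears the existing components, so a dependency or resolve failure surfaces while this structure is still intact. A minimal sketch of that ordering follows; the names (ReplaceOrderingSketch, Resolver) are hypothetical and stand in for the StructureDB internals.

import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch of the "resolve first, mutate second" ordering; not StructureDB itself.
class ReplaceOrderingSketch {
	interface Resolver {
		String resolve(String dt); // may throw an unchecked exception on dependency problems
	}

	private final List<String> components = new ArrayList<>();

	void replaceWith(List<String> incoming, Resolver resolver) {
		// Phase 1: resolve every incoming component before touching current state,
		// so a failure here cannot leave this composite half-cleared.
		List<String> resolved = new ArrayList<>(incoming.size());
		for (String dt : incoming) {
			resolved.add(resolver.resolve(dt));
		}
		// Phase 2: only now discard the old components and rebuild from the resolved list.
		components.clear();
		components.addAll(resolved);
	}
}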
private void doReplaceWithAligned(Structure struct, DataType[] resolvedDts) {
// assumes the components list has been cleared and that alignment characteristics have been set
DataTypeComponent[] otherComponents = struct.getDefinedComponents();
for (int i = 0; i < otherComponents.length; i++) {
DataTypeComponent dtc = otherComponents[i];
DataType dt = dtc.getDataType();
int length = (dt instanceof Dynamic) ? dtc.getLength() : -1;
doAdd(dt, length, false, dtc.getFieldName(), dtc.getComment(), false);
try {
doAdd(resolvedDts[i], length, dtc.getFieldName(), dtc.getComment(), false);
}
catch (DataTypeDependencyException e) {
throw new AssertException(e); // ancestry check already performed by caller
}
}
adjustInternalAlignment(false);
dataMgr.dataTypeChanged(this);
}
private void doReplaceWithUnaligned(Structure struct) throws IOException {
private void doReplaceWithUnaligned(Structure struct, DataType[] resolvedDts)
throws IOException {
// assumes the components list has been cleared and that alignment characteristics have been set.
if (struct.isNotYetDefined()) {
return;
@ -1190,8 +1290,7 @@ class StructureDB extends CompositeDB implements Structure {
for (int i = 0; i < otherComponents.length; i++) {
DataTypeComponent dtc = otherComponents[i];
DataType dt = resolve(dtc.getDataType());
checkAncestry(dt);
DataType dt = resolvedDts[i]; // ancestry check already performed by caller
int length = getPreferredComponentLength(dt, dtc.getLength());
@ -1205,6 +1304,28 @@ class StructureDB extends CompositeDB implements Structure {
adjustComponents(false);
}
@Override
protected void postPointerResolve(DataType definitionDt, DataTypeConflictHandler handler) {
Structure struct = (Structure) definitionDt;
if (struct.hasFlexibleArrayComponent() != hasFlexibleArrayComponent()) {
throw new IllegalArgumentException("mismatched definition datatype");
}
super.postPointerResolve(definitionDt, handler);
if (flexibleArrayComponent != null) {
DataTypeComponent flexDtc = struct.getFlexibleArrayComponent();
DataType dt = flexDtc.getDataType();
if (dt instanceof Pointer) {
flexibleArrayComponent.getDataType().removeParent(this);
dt = dataMgr.resolve(dt, handler);
flexibleArrayComponent.setDataType(dt);
dt.addParent(this);
}
}
}
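
postPointerResolve supports this commit's deferred pointer resolution: pointer components that may point back into composites still being resolved are revisited after the primary resolve completes. The sketch below shows only the queuing shape; queuePostResolve mirrors the call visible in this diff, but the types and the flush behavior here are illustrative, not the DataTypeManagerDB implementation.

import java.util.ArrayDeque;
import java.util.Deque;

// Hypothetical shape of the deferred pointer fix-up queue.
class DeferredPointerResolutionSketch {
	static class PendingFixup {
		final String composite;  // composite whose pointer components must be revisited
		final String definition; // source definition supplying the pointer types

		PendingFixup(String composite, String definition) {
			this.composite = composite;
			this.definition = definition;
		}
	}

	private final Deque<PendingFixup> postResolveQueue = new ArrayDeque<>();

	// Called while a composite is being resolved and one of its pointer components
	// targets a type that is itself still resolving.
	void queuePostResolve(String composite, String definition) {
		postResolveQueue.add(new PendingFixup(composite, definition));
	}

	// Called once by the outermost resolve, after all queued composites have records,
	// so the deferred pointers can now safely target them.
	void flushPostResolveQueue() {
		while (!postResolveQueue.isEmpty()) {
			PendingFixup fixup = postResolveQueue.poll();
			System.out.println("re-resolving pointer components of " + fixup.composite +
				" against " + fixup.definition);
		}
	}
}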
@Override
public void dataTypeDeleted(DataType dt) {
lock.acquire();
@ -1261,7 +1382,7 @@ class StructureDB extends CompositeDB implements Structure {
DataTypeComponentDB dtc = components.get(i);
int nextIndex = i + 1;
if (dtc.getDataType() == dt) {
// assume no impact to bitfields since base types
// assume no impact to bitfields since base types
// should not change size
int dtLen = dt.getLength();
int dtcLen = dtc.getLength();
@ -1304,62 +1425,74 @@ class StructureDB extends CompositeDB implements Structure {
@Override
public boolean isEquivalent(DataType dataType) {
if (dataType == this) {
return true;
}
if (dataType == null || !(dataType instanceof Structure)) {
if (!(dataType instanceof Structure)) {
return false;
}
checkIsValid();
if (resolving) {
if (resolving) { // actively resolving children
if (dataType.getUniversalID().equals(getUniversalID())) {
return true;
}
return DataTypeUtilities.equalsIgnoreConflict(getPathName(), dataType.getPathName());
}
Structure struct = (Structure) dataType;
if (isInternallyAligned() != struct.isInternallyAligned() ||
isDefaultAligned() != struct.isDefaultAligned() ||
isMachineAligned() != struct.isMachineAligned() ||
getMinimumAlignment() != struct.getMinimumAlignment() ||
getPackingValue() != struct.getPackingValue() ||
(!isInternallyAligned() && (getLength() != struct.getLength()))) {
return false;
Boolean isEquivalent = dataMgr.getCachedEquivalence(this, dataType);
if (isEquivalent != null) {
return isEquivalent;
}
DataTypeComponent myFlexComp = getFlexibleArrayComponent();
DataTypeComponent otherFlexComp = struct.getFlexibleArrayComponent();
if (myFlexComp != null) {
if (otherFlexComp == null || !myFlexComp.isEquivalent(otherFlexComp)) {
try {
isEquivalent = false;
Structure struct = (Structure) dataType;
if (isInternallyAligned() != struct.isInternallyAligned() ||
isDefaultAligned() != struct.isDefaultAligned() ||
isMachineAligned() != struct.isMachineAligned() ||
getMinimumAlignment() != struct.getMinimumAlignment() ||
getPackingValue() != struct.getPackingValue() ||
(!isInternallyAligned() && (getLength() != struct.getLength()))) {
return false;
}
}
else if (otherFlexComp != null) {
return false;
}
int myNumComps = getNumComponents();
int otherNumComps = struct.getNumComponents();
if (myNumComps != otherNumComps) {
return false;
}
for (int i = 0; i < myNumComps; i++) {
DataTypeComponent myDtc = getComponent(i);
DataTypeComponent otherDtc = struct.getComponent(i);
if (!myDtc.isEquivalent(otherDtc)) {
DataTypeComponent myFlexComp = getFlexibleArrayComponent();
DataTypeComponent otherFlexComp = struct.getFlexibleArrayComponent();
if (myFlexComp != null) {
if (otherFlexComp == null || !myFlexComp.isEquivalent(otherFlexComp)) {
return false;
}
}
else if (otherFlexComp != null) {
return false;
}
int myNumComps = getNumComponents();
int otherNumComps = struct.getNumComponents();
if (myNumComps != otherNumComps) {
return false;
}
for (int i = 0; i < myNumComps; i++) {
DataTypeComponent myDtc = getComponent(i);
DataTypeComponent otherDtc = struct.getComponent(i);
if (!myDtc.isEquivalent(otherDtc)) {
return false;
}
}
isEquivalent = true;
}
finally {
dataMgr.putCachedEquivalence(this, dataType, isEquivalent);
}
return true;
}
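
The getCachedEquivalence/putCachedEquivalence pair added here short-circuits repeated structural comparisons, and the try/finally guarantees that every exit path (including the early returns) records its answer. A self-contained sketch of that pattern, using a hypothetical long-pair key; the real cache lives in DataTypeManagerDB and keys on datatype identity.

import java.util.HashMap;
import java.util.Map;
import java.util.function.BooleanSupplier;

// Hypothetical sketch of the equivalence cache pattern.
class EquivalenceCacheSketch {
	private final Map<Long, Boolean> cache = new HashMap<>();

	private static long key(int idA, int idB) {
		return (((long) idA) << 32) | (idB & 0xffffffffL);
	}

	Boolean getCachedEquivalence(int idA, int idB) {
		return cache.get(key(idA, idB));
	}

	void putCachedEquivalence(int idA, int idB, boolean isEquivalent) {
		cache.put(key(idA, idB), isEquivalent);
	}

	// Mirrors the shape of isEquivalent above: consult the cache first, otherwise
	// compute inside try/finally so every exit path records its answer.
	boolean isEquivalent(int idA, int idB, BooleanSupplier expensiveCompare) {
		Boolean cached = getCachedEquivalence(idA, idB);
		if (cached != null) {
			return cached;
		}
		boolean result = false;
		try {
			result = expensiveCompare.getAsBoolean();
			return result;
		}
		finally {
			putCachedEquivalence(idA, idB, result);
		}
	}
}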
/**
*
* @param definedComponentIndex the index of the defined component that is consuming the bytes.
* @param numBytes the number of undefined bytes to consume
* @param definedComponentIndex the index of the defined component that is
* consuming the bytes.
* @param numBytes the number of undefined bytes to consume
* @return the number of bytes actually consumed
*/
private int consumeBytesAfter(int definedComponentIndex, int numBytes) {
@ -1422,19 +1555,20 @@ class StructureDB extends CompositeDB implements Structure {
}
/**
* Replace the indicated component with a new component containing the
* specified data type. Flex-array component not handled.
* @param origDtc the original data type component in this structure.
* Replace the indicated component with a new component containing the specified
* data type. Flex-array component not handled.
*
* @param origDtc the original data type component in this structure.
* @param resolvedDataType the data type of the new component
* @param length the length of the new component
* @param name the field name of the new component
* @param comment the comment for the new component
* @param length the length of the new component
* @param name the field name of the new component
* @param comment the comment for the new component
* @return the new component or null if the new component couldn't fit.
*/
private DataTypeComponent replaceComponent(DataTypeComponent origDtc, DataType resolvedDataType,
int length, String name, String comment, boolean doNotify) {
// FIXME: Unsure how o support replace operation with bit-fields. Within unaligned structure
// FIXME: Unsure how to support replace operation with bit-fields. Within unaligned structure
// the packing behavior for bit-fields prevents a one-for-one replacement and things may shift
// around which the unaligned structure tries to avoid. Insert and delete are less of a concern
// since movement already can occur, although insert at offset may not retain the offset if it
@ -1506,8 +1640,9 @@ class StructureDB extends CompositeDB implements Structure {
/**
* Gets the number of Undefined bytes beginning at the indicated component
* ordinal. Undefined bytes that have a field name or comment specified are
* also included.
* ordinal. Undefined bytes that have a field name or comment specified are also
* included.
*
* @param ordinal the component ordinal to begin checking at.
* @return the number of contiguous undefined bytes
*/
@ -1653,7 +1788,8 @@ class StructureDB extends CompositeDB implements Structure {
comp.setLength(len, true);
shiftOffsets(nextIndex, -bytesNeeded, 0);
}
else if (comp.getOrdinal() == getLastDefinedComponentIndex()) { // we are the last defined component, grow structure
else if (comp.getOrdinal() == getLastDefinedComponentIndex()) {
// we are the last defined component, grow structure
doGrowStructure(bytesNeeded - bytesAvailable);
comp.setLength(len, true);
shiftOffsets(nextIndex, -bytesNeeded, 0);
@ -1711,8 +1847,9 @@ class StructureDB extends CompositeDB implements Structure {
}
/**
* <code>ComponentComparator</code> provides ability to compare two DataTypeComponent objects
* based upon their ordinal. Intended to be used to sort components based upon ordinal.
* <code>ComponentComparator</code> provides ability to compare two
* DataTypeComponent objects based upon their ordinal. Intended to be used to
* sort components based upon ordinal.
*/
private static class ComponentComparator implements Comparator<DataTypeComponent> {
@Override
@ -1722,15 +1859,17 @@ class StructureDB extends CompositeDB implements Structure {
}
/**
* Adjust the alignment, packing and padding of components within this structure based upon the
* current alignment and packing attributes for this structure. This method should be
* called to basically fix up the layout of the internal components of the structure
* after other code has changed the attributes of the structure.
* <BR>When switching between internally aligned and unaligned this method corrects the
* component ordinal numbering also.
* @param notify if true this method will do data type change notification
* when it changes the layout of the components or when it changes the
* overall size of the structure.
* Adjust the alignment, packing and padding of components within this structure
* based upon the current alignment and packing attributes for this structure.
* This method should be called to basically fix up the layout of the internal
* components of the structure after other code has changed the attributes of
* the structure. <BR>
* When switching between internally aligned and unaligned this method corrects
* the component ordinal numbering also.
*
* @param notify if true this method will do data type change notification when
* it changes the layout of the components or when it changes the
* overall size of the structure.
* @return true if the structure was changed by this method.
*/
private boolean adjustComponents(boolean notify) {
@ -1858,7 +1997,7 @@ class StructureDB extends CompositeDB implements Structure {
}
@Override
public void pack(int packingSize) throws InvalidInputException {
public void pack(int packingSize) {
setPackingValue(packingSize);
}
@ -1873,7 +2012,7 @@ class StructureDB extends CompositeDB implements Structure {
}
@Override
public DataTypeComponent getFlexibleArrayComponent() {
public DataTypeComponentDB getFlexibleArrayComponent() {
return flexibleArrayComponent;
}
@ -1885,12 +2024,17 @@ class StructureDB extends CompositeDB implements Structure {
@Override
public DataTypeComponent setFlexibleArrayComponent(DataType flexType, String name,
String comment) {
String comment) throws IllegalArgumentException {
if (isInvalidFlexArrayDataType(flexType)) {
throw new IllegalArgumentException(
"Unsupported flexType: " + flexType.getDisplayName());
}
return doAdd(flexType, 0, true, name, comment, true);
try {
return doAddFlexArray(flexType, name, comment, true);
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
}
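
For context, a flexible array component models a trailing zero-length array, e.g. struct pkt { int len; char data[]; }. A hedged usage sketch against the Structure API referenced above; it assumes a Ghidra of this vintage on the classpath, and the names pkt/len/data are illustrative.

import ghidra.program.model.data.CharDataType;
import ghidra.program.model.data.IntegerDataType;
import ghidra.program.model.data.StructureDataType;

public class FlexArrayDemo {
	public static void main(String[] args) {
		// Roughly models: struct pkt { int len; char data[]; };
		StructureDataType pkt = new StructureDataType("pkt", 0);
		pkt.add(new IntegerDataType(), "len", null);
		// Unsupported flex types are rejected with IllegalArgumentException
		// (see the isInvalidFlexArrayDataType check above).
		pkt.setFlexibleArrayComponent(new CharDataType(), "data", null);
		System.out.println(pkt.hasFlexibleArrayComponent()); // true
	}
}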
@Override

View File

@ -27,8 +27,6 @@ import ghidra.util.Msg;
/**
* Database implementation for the Union data type.
*
*
*/
class UnionDB extends CompositeDB implements Union {
@ -38,6 +36,7 @@ class UnionDB extends CompositeDB implements Union {
/**
* Constructor
*
* @param dataMgr
* @param cache
* @param compositeAdapter
@ -57,8 +56,8 @@ class UnionDB extends CompositeDB implements Union {
try {
long[] ids = componentAdapter.getComponentIdsInComposite(key);
for (int i = 0; i < ids.length; i++) {
Record rec = componentAdapter.getRecord(ids[i]);
for (long id : ids) {
Record rec = componentAdapter.getRecord(id);
components.add(new DataTypeComponentDB(dataMgr, componentAdapter, this, rec));
}
}
@ -84,14 +83,18 @@ class UnionDB extends CompositeDB implements Union {
}
@Override
public DataTypeComponent add(DataType dataType, int length, String name, String comment) {
public DataTypeComponent add(DataType dataType, int length, String componentName,
String comment) throws IllegalArgumentException {
lock.acquire();
try {
checkDeleted();
DataTypeComponent dtc = doAdd(dataType, length, name, comment);
DataTypeComponent dtc = doAdd(dataType, length, componentName, comment, true);
adjustLength(true, true);
return dtc;
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
finally {
lock.release();
}
@ -117,14 +120,17 @@ class UnionDB extends CompositeDB implements Union {
return length;
}
private DataTypeComponent doAdd(DataType dataType, int length, String name, String comment) {
private DataTypeComponent doAdd(DataType dataType, int length, String name, String comment,
boolean validateAlignAndNotify) throws DataTypeDependencyException {
validateDataType(dataType);
dataType = adjustBitField(dataType);
dataType = resolve(dataType);
checkAncestry(dataType);
if (validateAlignAndNotify) {
dataType = resolve(dataType);
checkAncestry(dataType);
}
length = getPreferredComponentLength(dataType, length);
@ -161,7 +167,7 @@ class UnionDB extends CompositeDB implements Union {
@Override
public DataTypeComponent insert(int ordinal, DataType dataType, int length, String name,
String comment) {
String comment) throws IllegalArgumentException {
lock.acquire();
try {
checkDeleted();
@ -183,6 +189,9 @@ class UnionDB extends CompositeDB implements Union {
adjustLength(true, true);
return dtc;
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
finally {
lock.release();
}
@ -232,56 +241,75 @@ class UnionDB extends CompositeDB implements Union {
}
}
/**
* Replaces the internal components of this union with components of the
* given union.
* @param dataType the union to get the component information from.
* @throws IllegalArgumentException if any of the component data types
* are not allowed to replace a component in this composite data type.
* For example, suppose dt1 contains dt2. Therefore it is not valid
* to replace a dt2 component with dt1 since this would cause a cyclic
* dependency.
*/
@Override
public void replaceWith(DataType dataType) {
if (!(dataType instanceof Union)) {
throw new IllegalArgumentException();
}
doReplaceWith((Union) dataType, true, null);
}
void doReplaceWith(Union union, boolean notify, DataTypeConflictHandler handler) {
lock.acquire();
boolean isResolveCacheOwner = dataMgr.activateResolveCache();
try {
checkDeleted();
long oldMinAlignment = getMinimumAlignment();
for (int i = 0; i < components.size(); i++) {
DataTypeComponentDB dtc = components.get(i);
dtc.getDataType().removeParent(this);
removeComponent(dtc.getKey());
}
components.clear();
setAlignment(union, notify);
for (DataTypeComponent dtc : union.getComponents()) {
DataType dt = dtc.getDataType();
doAdd(dt, dtc.getLength(), dtc.getFieldName(), dtc.getComment());
}
adjustLength(notify, true); // TODO: VERIFY! is it always appropriate to set update time??
if (notify && (oldMinAlignment != getMinimumAlignment())) {
notifyAlignmentChanged();
}
doReplaceWith((Union) dataType, true, dataMgr.getCurrentConflictHandler());
}
catch (DataTypeDependencyException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
finally {
if (isResolveCacheOwner) {
dataMgr.flushResolveCacheAndClearQueue(null);
}
lock.release();
}
}
void doReplaceWith(Union union, boolean notify, DataTypeConflictHandler handler)
throws DataTypeDependencyException {
// pre-resolve component types to catch dependency issues early
DataTypeComponent[] otherComponents = union.getComponents();
DataType[] resolvedDts = new DataType[otherComponents.length];
for (int i = 0; i < otherComponents.length; i++) {
resolvedDts[i] = doCheckedResolve(otherComponents[i].getDataType(), handler);
checkAncestry(resolvedDts[i]);
}
int oldLength = unionLength;
int oldMinAlignment = getMinimumAlignment();
for (int i = 0; i < components.size(); i++) {
DataTypeComponentDB dtc = components.get(i);
dtc.getDataType().removeParent(this);
removeComponent(dtc.getKey());
}
components.clear();
setAlignment(union, false);
for (int i = 0; i < otherComponents.length; i++) {
DataTypeComponent dtc = otherComponents[i];
doAdd(resolvedDts[i], dtc.getLength(), dtc.getFieldName(), dtc.getComment(), false);
}
adjustLength(false, false);
if (notify) {
if (oldMinAlignment != getMinimumAlignment()) {
notifyAlignmentChanged();
}
else if (oldLength != unionLength) {
notifySizeChanged();
}
else {
dataMgr.dataTypeChanged(this);
}
}
if (pointerPostResolveRequired) {
dataMgr.queuePostResolve(this, union);
}
}
@Override
public boolean isPartOf(DataType dataType) {
lock.acquire();
@ -320,6 +348,11 @@ class UnionDB extends CompositeDB implements Union {
}
}
@Override
public int getNumDefinedComponents() {
return getNumComponents();
}
@Override
public DataTypeComponent getComponent(int ordinal) {
lock.acquire();
@ -336,17 +369,22 @@ class UnionDB extends CompositeDB implements Union {
}
@Override
public DataTypeComponent[] getComponents() {
public DataTypeComponentDB[] getComponents() {
lock.acquire();
try {
checkIsValid();
return components.toArray(new DataTypeComponent[components.size()]);
return components.toArray(new DataTypeComponentDB[components.size()]);
}
finally {
lock.release();
}
}
@Override
public DataTypeComponentDB[] getDefinedComponents() {
return getComponents();
}
@Override
public DataType copy(DataTypeManager dtm) {
UnionDataType union = new UnionDataType(getCategoryPath(), getName(), dtm);
@ -419,7 +457,7 @@ class UnionDB extends CompositeDB implements Union {
baseDataType = resolve(baseDataType);
// Both aligned and unaligned bitfields use same adjustment
// unaligned must force bitfield placement at byte offset 0
// unaligned must force bitfield placement at byte offset 0
int bitSize = bitfieldDt.getDeclaredBitSize();
int effectiveBitSize =
BitFieldDataType.getEffectiveBitSize(bitSize, baseDataType.getLength());
@ -515,40 +553,54 @@ class UnionDB extends CompositeDB implements Union {
}
@Override
public boolean isEquivalent(DataType dt) {
if (dt == this) {
public boolean isEquivalent(DataType dataType) {
if (dataType == this) {
return true;
}
if (dt == null || !(dt instanceof Union)) {
if (!(dataType instanceof Union)) {
return false;
}
checkIsValid();
if (resolving) {
if (dt.getUniversalID().equals(getUniversalID())) {
if (resolving) { // actively resolving children
if (dataType.getUniversalID().equals(getUniversalID())) {
return true;
}
return DataTypeUtilities.equalsIgnoreConflict(getPathName(), dt.getPathName());
return DataTypeUtilities.equalsIgnoreConflict(getPathName(), dataType.getPathName());
}
Union union = (Union) dt;
if (isInternallyAligned() != union.isInternallyAligned() ||
isDefaultAligned() != union.isDefaultAligned() ||
isMachineAligned() != union.isMachineAligned() ||
getMinimumAlignment() != union.getMinimumAlignment() ||
getPackingValue() != union.getPackingValue()) {
// rely on component match instead of checking length
// since dynamic component sizes could affect length
return false;
Boolean isEquivalent = dataMgr.getCachedEquivalence(this, dataType);
if (isEquivalent != null) {
return isEquivalent;
}
DataTypeComponent[] myComps = getComponents();
DataTypeComponent[] otherComps = union.getComponents();
if (myComps.length != otherComps.length) {
return false;
}
for (int i = 0; i < myComps.length; i++) {
if (!myComps[i].isEquivalent(otherComps[i])) {
try {
isEquivalent = false;
Union union = (Union) dataType;
if (isInternallyAligned() != union.isInternallyAligned() ||
isDefaultAligned() != union.isDefaultAligned() ||
isMachineAligned() != union.isMachineAligned() ||
getMinimumAlignment() != union.getMinimumAlignment() ||
getPackingValue() != union.getPackingValue()) {
// rely on component match instead of checking length
// since dynamic component sizes could affect length
return false;
}
DataTypeComponent[] myComps = getComponents();
DataTypeComponent[] otherComps = union.getComponents();
if (myComps.length != otherComps.length) {
return false;
}
for (int i = 0; i < myComps.length; i++) {
if (!myComps[i].isEquivalent(otherComps[i])) {
return false;
}
}
isEquivalent = true;
}
finally {
dataMgr.putCachedEquivalence(this, dataType, isEquivalent);
}
return true;
}

View File

@ -261,7 +261,6 @@ public class ExternalManagerDB implements ManagerDB, ExternalManager {
SourceType locSourceType = checkExternalLabel(extLabel, extAddr, sourceType);
lock.acquire();
try {
// FIXME:
Namespace libraryScope = getLibraryScope(extLibraryName);
if (libraryScope == null) {
libraryScope = addExternalName(extLibraryName, null,

View File

@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -16,6 +15,9 @@
*/
package ghidra.program.model.block;
import java.util.ArrayList;
import java.util.LinkedList;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSet;
import ghidra.program.model.listing.Program;
@ -23,9 +25,6 @@ import ghidra.program.model.symbol.FlowType;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;
import java.util.ArrayList;
import java.util.LinkedList;
/**
* <CODE>IsolatedEntryCodeSubModel</CODE> (S-model) defines subroutines with a
* unique entry point, which may share code with other subroutines. Each entry-
@ -34,7 +33,7 @@ import java.util.LinkedList;
* the set of addresses contained within each subroutine. Unlike the
* OverlapCodeSubModel, the address set of a IsolatedEntryCodeSubModel
* subroutine is permitted to span entry-points of other subroutines based upon
* the possible flows from its' entry- point.
* the possible flows from its entry- point.
*
* @see ghidra.program.model.block.CodeBlockModel
* @see ghidra.program.model.block.OverlapCodeSubModel
@ -79,13 +78,15 @@ public class IsolatedEntrySubModel extends OverlapCodeSubModel {
// Create address list which contains all other entry points for this M-model sub
CodeBlock mSub = modelM.getCodeBlockAt(mStartAddr, monitor);
if (mSub == null)
return null;
if (mSub == null) {
return null;
}
Address[] mEntryPts = mSub.getStartAddresses();
ArrayList<Address> startSet = new ArrayList<Address>();
for (int i = 0; i < mEntryPts.length; i++) {
if (!mStartAddr.equals(mEntryPts[i]))
startSet.add(mEntryPts[i]);
for (Address mEntryPt : mEntryPts) {
if (!mStartAddr.equals(mEntryPt)) {
startSet.add(mEntryPt);
}
}
// create a holder for the blockSet
@ -100,20 +101,25 @@ public class IsolatedEntrySubModel extends OverlapCodeSubModel {
// Build model-S subroutine from basic blocks
while (!todoList.isEmpty()) {
if (monitor.isCancelled())
throw new CancelledException();
if (monitor.isCancelled()) {
throw new CancelledException();
}
// Get basic block at the specified address
Address a = todoList.removeLast();
if (addrSet.contains(a) || startSet.contains(a)) // <<-- only difference from Model-O
continue; // already processed this block or encountered another Model-M entry point
if (addrSet.contains(a) || startSet.contains(a))
{
continue; // already processed this block or encountered another Model-M entry point
}
CodeBlock bblock = bbModel.getFirstCodeBlockContaining(a, monitor);
if (bblock == null)
continue;
if (bblock == null) {
continue;
}
// Verify that the block contains instructions
if (listing.getInstructionAt(a) == null)
continue;
if (listing.getInstructionAt(a) == null) {
continue;
}
// Add basic block to subroutine address set
addrSet.add(bblock);

View File

@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -16,6 +15,9 @@
*/
package ghidra.program.model.block;
import java.util.ArrayList;
import java.util.LinkedList;
import ghidra.program.model.address.*;
import ghidra.program.model.listing.Listing;
import ghidra.program.model.listing.Program;
@ -23,15 +25,12 @@ import ghidra.program.model.symbol.*;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;
import java.util.ArrayList;
import java.util.LinkedList;
/**
* <CODE>OverlapCodeSubModel</CODE> (O-model) defines subroutines with a
* unique entry point, which may share code with other subroutines. Each entry-
* point may either be a source or called entry-point and is identified using
* the MultEntSubModel. This model defines the set of addresses contained
* within each subroutine based upon the possible flows from its' entry- point.
* within each subroutine based upon the possible flows from its entry- point.
* Flows which encounter another entry-point are terminated.
* <P>
* NOTE: This differs from the original definition of an entry point, however,
@ -104,20 +103,25 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
// Build model-O subroutine from basic blocks
while (!todoList.isEmpty()) {
if (monitor.isCancelled())
throw new CancelledException();
if (monitor.isCancelled()) {
throw new CancelledException();
}
// Get basic block at the specified address
Address a = todoList.removeLast();
if (addrSet.contains(a))
continue; // already processed this block
{
continue; // already processed this block
}
CodeBlock bblock = bbModel.getFirstCodeBlockContaining(a, monitor);
if (bblock == null)
continue;
if (bblock == null) {
continue;
}
// Verify that the block contains instructions
if (listing.getInstructionAt(a) == null)
continue;
if (listing.getInstructionAt(a) == null) {
continue;
}
// Add basic block to subroutine address set
addrSet.add(bblock);
@ -140,7 +144,8 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getCodeBlockAt(ghidra.program.model.address.Address, ghidra.util.task.TaskMonitor)
*/
public CodeBlock getCodeBlockAt(Address addr, TaskMonitor monitor) throws CancelledException {
@Override
public CodeBlock getCodeBlockAt(Address addr, TaskMonitor monitor) throws CancelledException {
// First check out the Block cache
CodeBlock block = foundOSubs.getBlockAt(addr);
@ -170,7 +175,8 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
* contains the address empty array otherwise.
* @throws CancelledException if the monitor cancels the operation.
*/
public CodeBlock[] getCodeBlocksContaining(Address addr, TaskMonitor monitor) throws CancelledException {
@Override
public CodeBlock[] getCodeBlocksContaining(Address addr, TaskMonitor monitor) throws CancelledException {
// First check out the Block cache
CodeBlock[] blocks = foundOSubs.getBlocksContaining(addr);
@ -179,8 +185,9 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
}
CodeBlock modelMSub = modelM.getFirstCodeBlockContaining(addr, monitor);
if (modelMSub == null)
return emptyBlockArray;
if (modelMSub == null) {
return emptyBlockArray;
}
Address[] entPts = modelMSub.getStartAddresses();
// Single-entry MSub same as OSub
@ -195,8 +202,9 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
ArrayList<CodeBlock> blockList = new ArrayList<CodeBlock>();
for (int i = 0; i < cnt; i++) {
CodeBlock block = getSubroutine(entPts[i], monitor);
if (block.contains(addr))
blockList.add(block);
if (block.contains(addr)) {
blockList.add(block);
}
}
return blockList.toArray(new CodeBlock[blockList.size()]);
}
@ -205,7 +213,8 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getFirstCodeBlockContaining(ghidra.program.model.address.Address, ghidra.util.task.TaskMonitor)
*/
public CodeBlock getFirstCodeBlockContaining(Address addr, TaskMonitor monitor) throws CancelledException {
@Override
public CodeBlock getFirstCodeBlockContaining(Address addr, TaskMonitor monitor) throws CancelledException {
// First check out the Block cache
CodeBlock block = foundOSubs.getFirstBlockContaining(addr);
@ -214,8 +223,9 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
}
CodeBlock modelMSub = modelM.getFirstCodeBlockContaining(addr, monitor);
if (modelMSub == null)
return null;
if (modelMSub == null) {
return null;
}
Address[] entPts = modelMSub.getStartAddresses();
// Single-entry MSub same as OSub
@ -227,8 +237,9 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
// Return first OSub which contains addr
for (int i = 0; i < cnt; i++) {
block = getSubroutine(entPts[i], monitor);
if (block != null && block.contains(addr))
return block;
if (block != null && block.contains(addr)) {
return block;
}
}
return null;
}
@ -236,14 +247,16 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getCodeBlocks(ghidra.util.task.TaskMonitor)
*/
public CodeBlockIterator getCodeBlocks(TaskMonitor monitor) throws CancelledException {
@Override
public CodeBlockIterator getCodeBlocks(TaskMonitor monitor) throws CancelledException {
return new SingleEntSubIterator(this, monitor);
}
/**
* @see ghidra.program.model.block.CodeBlockModel#getCodeBlocksContaining(ghidra.program.model.address.AddressSetView, ghidra.util.task.TaskMonitor)
*/
public CodeBlockIterator getCodeBlocksContaining(AddressSetView addrSet, TaskMonitor monitor) throws CancelledException {
@Override
public CodeBlockIterator getCodeBlocksContaining(AddressSetView addrSet, TaskMonitor monitor) throws CancelledException {
return new SingleEntSubIterator(this, addrSet, monitor);
}
@ -259,7 +272,8 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getProgram()
*/
public Program getProgram() {
@Override
public Program getProgram() {
return program;
}
@ -274,14 +288,16 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getName(ghidra.program.model.block.CodeBlock)
*/
public String getName(CodeBlock block) {
@Override
public String getName(CodeBlock block) {
// get the start address for the block
// look up the symbol in the symbol table.
// it should have one if anyone calls it.
// if not, make up a label
if (!(block.getModel() instanceof OverlapCodeSubModel))
throw new IllegalArgumentException();
if (!(block.getModel() instanceof OverlapCodeSubModel)) {
throw new IllegalArgumentException();
}
Address start = block.getFirstStartAddress();
@ -309,7 +325,8 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
*
* @return flow type of this node
*/
public FlowType getFlowType(CodeBlock block) {
@Override
public FlowType getFlowType(CodeBlock block) {
/* If there are multiple unique ways out of the node, then we
should return FlowType.UNKNOWN (or FlowType.MULTIFLOW ?).
Possible considerations for the future which are particularly
@ -318,8 +335,9 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
(as opposed to jumping within the subroutine).
Might want to consider FlowType.MULTITERMINAL for multiple returns? */
if (!(block.getModel() instanceof OverlapCodeSubModel))
throw new IllegalArgumentException();
if (!(block.getModel() instanceof OverlapCodeSubModel)) {
throw new IllegalArgumentException();
}
return RefType.FLOW;
}
@ -327,10 +345,12 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getSources(ghidra.program.model.block.CodeBlock, ghidra.util.task.TaskMonitor)
*/
public CodeBlockReferenceIterator getSources(CodeBlock block, TaskMonitor monitor) throws CancelledException {
@Override
public CodeBlockReferenceIterator getSources(CodeBlock block, TaskMonitor monitor) throws CancelledException {
if (!(block.getModel() instanceof OverlapCodeSubModel))
throw new IllegalArgumentException();
if (!(block.getModel() instanceof OverlapCodeSubModel)) {
throw new IllegalArgumentException();
}
return new SubroutineSourceReferenceIterator(block, monitor);
}
@ -338,10 +358,12 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getNumSources(ghidra.program.model.block.CodeBlock, ghidra.util.task.TaskMonitor)
*/
public int getNumSources(CodeBlock block, TaskMonitor monitor) throws CancelledException {
@Override
public int getNumSources(CodeBlock block, TaskMonitor monitor) throws CancelledException {
if (!(block.getModel() instanceof OverlapCodeSubModel))
throw new IllegalArgumentException();
if (!(block.getModel() instanceof OverlapCodeSubModel)) {
throw new IllegalArgumentException();
}
return SubroutineSourceReferenceIterator.getNumSources(block, monitor);
}
@ -349,7 +371,8 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getDestinations(ghidra.program.model.block.CodeBlock, ghidra.util.task.TaskMonitor)
*/
public CodeBlockReferenceIterator getDestinations(CodeBlock block, TaskMonitor monitor) throws CancelledException {
@Override
public CodeBlockReferenceIterator getDestinations(CodeBlock block, TaskMonitor monitor) throws CancelledException {
// destinations of Fallthroughs are the follow on block
// destinations of all others are the instruction's operand referents
@ -367,8 +390,9 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
// nor that any destination is a good destination unless the instruction
// is looked at.
if (!(block.getModel() instanceof OverlapCodeSubModel))
throw new IllegalArgumentException();
if (!(block.getModel() instanceof OverlapCodeSubModel)) {
throw new IllegalArgumentException();
}
return new SubroutineDestReferenceIterator(block, monitor);
}
@ -376,10 +400,12 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getNumDestinations(ghidra.program.model.block.CodeBlock, ghidra.util.task.TaskMonitor)
*/
public int getNumDestinations(CodeBlock block, TaskMonitor monitor) throws CancelledException {
@Override
public int getNumDestinations(CodeBlock block, TaskMonitor monitor) throws CancelledException {
if (!(block.getModel() instanceof OverlapCodeSubModel))
throw new IllegalArgumentException();
if (!(block.getModel() instanceof OverlapCodeSubModel)) {
throw new IllegalArgumentException();
}
return SubroutineDestReferenceIterator.getNumDestinations(block, monitor);
}
@ -393,8 +419,9 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
*/
protected CodeBlock createSub(AddressSetView addrSet, Address entryPt) {
if (addrSet.isEmpty())
if (addrSet.isEmpty()) {
return null;
}
Address[] entryPts = new Address[1];
entryPts[0] = entryPt;
@ -408,6 +435,7 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getBasicBlockModel()
*/
@Override
public CodeBlockModel getBasicBlockModel() {
return modelM.getBasicBlockModel();
}
@ -415,6 +443,7 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#getName()
*/
@Override
public String getName() {
return OVERLAP_MODEL_NAME;
}
@ -422,6 +451,7 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.SubroutineBlockModel#getBaseSubroutineModel()
*/
@Override
public SubroutineBlockModel getBaseSubroutineModel() {
return modelM;
}
@ -429,10 +459,12 @@ public class OverlapCodeSubModel implements SubroutineBlockModel {
/**
* @see ghidra.program.model.block.CodeBlockModel#allowsBlockOverlap()
*/
@Override
public boolean allowsBlockOverlap() {
return true;
}
@Override
public boolean externalsIncluded() {
return modelM.externalsIncluded();
}

View File

@ -17,7 +17,6 @@ package ghidra.program.model.data;
import java.net.URL;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.Settings;
import ghidra.program.model.mem.MemBuffer;
import ghidra.util.InvalidNameException;

View File

@ -48,10 +48,11 @@ public class ArrayDataType extends DataTypeImpl implements Array {
* @param dataType the dataType of the elements in the array.
* @param numElements the number of elements in the array.
* @param elementLength the length of an individual element in the array.
* @param dtm datatype manager or null
*/
public ArrayDataType(DataType dataType, int numElements, int elementLength,
DataTypeManager dtm) {
super(CategoryPath.ROOT, "array", dtm);
super(dataType.getCategoryPath(), "array", dtm);
validate(dataType);
if (dataType.getDataTypeManager() != dtm) {
dataType = dataType.clone(dtm);
@ -100,9 +101,6 @@ public class ArrayDataType extends DataTypeImpl implements Array {
@Override
public boolean isEquivalent(DataType obj) {
if (obj == null) {
return false;
}
if (obj == this) {
return true;
}

View File

@ -29,7 +29,7 @@ import utilities.util.ArrayUtilities;
* for use within data structures. The length (i.e., storage size) of this bitfield datatype is
* the minimum number of bytes required to contain the bitfield at its specified offset.
* The effective bit-size of a bitfield will be limited by the size of the base
* datatype whose size may be controlled by its' associated datatype manager and data organization
* datatype whose size may be controlled by its associated datatype manager and data organization
* (e.g., {@link IntegerDataType}).
* <p>
* NOTE: Instantiation of this datatype implementation is intended for internal use only.

View File

@ -20,7 +20,6 @@ import java.util.List;
import javax.swing.event.ChangeListener;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.framework.ShutdownHookRegistry;
import ghidra.framework.ShutdownPriority;
import ghidra.util.*;

View File

@ -73,11 +73,24 @@ public interface Composite extends DataType {
public void setDescription(String desc);
/**
* Gets the number of component data types in this data type.
* @return the number of components that make up this data prototype
* Gets the number of component data types in this composite.
* The count will include all undefined filler components which may be present
* within an unaligned structure. Structures do not include the
* optional trailing flexible array component in this count
* (see {@link Structure#hasFlexibleArrayComponent()}).
* @return the number of components that make up this composite
*/
public abstract int getNumComponents();
/**
* Returns the number of explicitly defined components in this composite.
* For Unions and aligned Structures this is equivalent to {@link #getNumComponents()}
* since they do not contain undefined components. The count will exclude all undefined
* filler components which may be present within an unaligned structure.
* @return the number of explicitly defined components in this composite
*/
public abstract int getNumDefinedComponents();
/**
* Returns the component of this data type with the indicated ordinal.
* @param ordinal the component's ordinal (zero based).
@ -87,11 +100,25 @@ public interface Composite extends DataType {
public abstract DataTypeComponent getComponent(int ordinal);
/**
* Returns an array of Data Type Components that make up this data type.
* Returns an array of length 0 if there are no subcomponents.
* Returns an array of Data Type Components that make up this composite including
* undefined filler components which may be present within an unaligned structure.
* The number of components corresponds to {@link #getNumComponents()}.
* @return list all components
*/
public abstract DataTypeComponent[] getComponents();
/**
* Returns an array of Data Type Components that make up this composite excluding
* undefined filler components which may be present within an unaligned structure.
* The number of components corresponds to {@link #getNumDefinedComponents()}. For Unions and
* aligned Structures this is equivalent to {@link #getComponents()}
* since they do not contain undefined components. Structures do not include the
* optional trailing flexible array component in this list
* (see {@link Structure#getFlexibleArrayComponent()}).
* @return list all explicitly defined components
*/
public abstract DataTypeComponent[] getDefinedComponents();
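
A short contrast of the two accessors for an unaligned structure containing undefined filler. The layout below is illustrative and assumes the Ghidra data type classes are on the classpath; the printed counts reflect the expected behavior described in the javadoc above.

import ghidra.program.model.data.ByteDataType;
import ghidra.program.model.data.IntegerDataType;
import ghidra.program.model.data.StructureDataType;

public class DefinedComponentsDemo {
	public static void main(String[] args) {
		// Unaligned 8-byte structure with a gap between the two defined fields.
		StructureDataType struct = new StructureDataType("demo", 8);
		struct.replaceAtOffset(0, new ByteDataType(), 1, "flag", null);
		struct.replaceAtOffset(4, new IntegerDataType(), 4, "value", null);

		// Counts the 3 undefined filler bytes at offsets 1..3 as individual components.
		System.out.println(struct.getNumComponents());            // e.g. 5
		// Only the explicitly defined fields.
		System.out.println(struct.getDefinedComponents().length); // 2
	}
}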
/**
* Adds a new datatype to the end of this composite. This is the preferred method
* to use for adding components to an aligned structure for fixed-length dataTypes.
@ -102,7 +129,7 @@ public interface Composite extends DataType {
* For example, suppose dt1 contains dt2. Therefore it is not valid
* to add dt1 to dt2 since this would cause a cyclic dependency.
*/
public DataTypeComponent add(DataType dataType);
public DataTypeComponent add(DataType dataType) throws IllegalArgumentException;
/**
* Adds a new datatype to the end of this composite. This is the preferred method
@ -118,7 +145,7 @@ public interface Composite extends DataType {
* For example, suppose dt1 contains dt2. Therefore it is not valid
* to add dt1 to dt2 since this would cause a cyclic dependency.
*/
public DataTypeComponent add(DataType dataType, int length);
public DataTypeComponent add(DataType dataType, int length) throws IllegalArgumentException;
/**
* Adds a new datatype to the end of this composite. This is the preferred method
@ -132,7 +159,8 @@ public interface Composite extends DataType {
* For example, suppose dt1 contains dt2. Therefore it is not valid
* to add dt1 to dt2 since this would cause a cyclic dependency.
*/
public DataTypeComponent add(DataType dataType, String name, String comment);
public DataTypeComponent add(DataType dataType, String name, String comment)
throws IllegalArgumentException;
/**
* Adds a new bitfield to the end of this composite. This method is intended
@ -166,7 +194,8 @@ public interface Composite extends DataType {
* For example, suppose dt1 contains dt2. Therefore it is not valid
* to add dt1 to dt2 since this would cause a cyclic dependency.
*/
public DataTypeComponent add(DataType dataType, int length, String name, String comment);
public DataTypeComponent add(DataType dataType, int length, String name, String comment)
throws IllegalArgumentException;
/**
* Inserts a new datatype at the specified ordinal position in this composite.
@ -181,7 +210,7 @@ public interface Composite extends DataType {
* to insert dt1 to dt2 since this would cause a cyclic dependency.
* @throws ArrayIndexOutOfBoundsException if component ordinal is out of bounds
*/
public DataTypeComponent insert(int ordinal, DataType dataType);
public DataTypeComponent insert(int ordinal, DataType dataType) throws IllegalArgumentException;
/**
* Inserts a new datatype at the specified ordinal position in this composite.
@ -199,7 +228,8 @@ public interface Composite extends DataType {
* to insert dt1 to dt2 since this would cause a cyclic dependency.
* @throws ArrayIndexOutOfBoundsException if component ordinal is out of bounds
*/
public DataTypeComponent insert(int ordinal, DataType dataType, int length);
public DataTypeComponent insert(int ordinal, DataType dataType, int length)
throws IllegalArgumentException;
/**
* Inserts a new datatype at the specified ordinal position in this composite.
@ -220,7 +250,7 @@ public interface Composite extends DataType {
* @throws ArrayIndexOutOfBoundsException if component ordinal is out of bounds
*/
public DataTypeComponent insert(int ordinal, DataType dataType, int length, String name,
String comment);
String comment) throws ArrayIndexOutOfBoundsException, IllegalArgumentException;
/**
* Deletes the component at the given ordinal position.
@ -229,7 +259,7 @@ public interface Composite extends DataType {
* @param ordinal the ordinal of the component to be deleted.
* @throws ArrayIndexOutOfBoundsException if component ordinal is out of bounds
*/
public void delete(int ordinal);
public void delete(int ordinal) throws ArrayIndexOutOfBoundsException;
/**
* Deletes the components at the given ordinal positions.
@ -238,7 +268,7 @@ public interface Composite extends DataType {
* @param ordinals the ordinals of the component to be deleted.
* @throws ArrayIndexOutOfBoundsException if any specified component ordinal is out of bounds
*/
public void delete(int[] ordinals);
public void delete(int[] ordinals) throws ArrayIndexOutOfBoundsException;
/**
* Check if a data type is part of this data type. A data type could

View File

@ -22,7 +22,7 @@ public class CompositeAlignmentHelper {
int allComponentsLCM = 1;
int packingAlignment = composite.getPackingValue();
DataTypeComponent[] dataTypeComponents = composite.getComponents();
DataTypeComponent[] dataTypeComponents = composite.getDefinedComponents();
for (DataTypeComponent dataTypeComponent : dataTypeComponents) {
int impartedAlignment = CompositeAlignmentHelper.getPackedAlignment(dataOrganization,
packingAlignment, dataTypeComponent);

View File

@ -15,7 +15,6 @@
*/
package ghidra.program.model.data;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.Settings;
import ghidra.program.database.data.DataTypeUtilities;
import ghidra.program.model.mem.MemBuffer;
@ -25,26 +24,35 @@ import ghidra.util.exception.AssertException;
import ghidra.util.exception.NotYetImplementedException;
/**
* Common implementation methods for structure and union
* Common implementation methods for structure and union
*/
public abstract class CompositeDataTypeImpl extends GenericDataType implements Composite {
private final static long serialVersionUID = 1;
private String description;
protected boolean aligned = false; //WARNING, changing the initial value for this will cause
protected boolean aligned = false; // WARNING, changing the initial value for this will cause
// subtle errors - One I know of is in the StructureDataType
// copyComponent method. It has built in assumptions about this.
// copyComponent method. It has built in assumptions about this.
protected AlignmentType alignmentType = AlignmentType.DEFAULT_ALIGNED;
protected int packingValue = NOT_PACKING;
protected int externalAlignment = DEFAULT_ALIGNMENT_VALUE;
/**
* Creates an empty CompositeDataType with the specified name.
* @param path the category path indicating where this data type is located.
* @param name the data type's name
* @param dataTypeManager the data type manager associated with this data type. This can be null.
* Also, the data type manager may not contain this actual data type.
* Construct a new composite with the given name
*
* @param path the category path indicating where this
* data type is located.
* @param name the name of the new structure
* @param universalID the id for the data type
* @param sourceArchive the source archive for this data type
* @param lastChangeTime the last time this data type was changed
* @param lastChangeTimeInSourceArchive the last time this data type was changed
* in its source archive.
* @param dtm the data type manager associated with
* this data type. This can be null. Also,
* the data type manager may not yet
* contain this actual data type.
*/
CompositeDataTypeImpl(CategoryPath path, String name, UniversalID universalID,
SourceArchive sourceArchive, long lastChangeTime, long lastChangeTimeInSourceArchive,
@ -60,13 +68,15 @@ public abstract class CompositeDataTypeImpl extends GenericDataType implements C
}
/**
* Get the preferred length for a new component. For Unions and internally aligned
* structures the preferred component length for a fixed-length dataType will be the
* length of that dataType. Otherwise the length returned will be no larger than the
* specified length.
* Get the preferred length for a new component. For Unions and internally
* aligned structures the preferred component length for a fixed-length dataType
* will be the length of that dataType. Otherwise the length returned will be no
* larger than the specified length.
*
* @param dataType new component datatype
* @param length constrained length or -1 to force use of dataType size. Dynamic types
* such as string must have a positive length specified.
* @param length constrained length or -1 to force use of dataType size.
* Dynamic types such as string must have a positive length
* specified.
* @return preferred component length
*/
protected int getPreferredComponentLength(DataType dataType, int length) {
@ -98,15 +108,15 @@ public abstract class CompositeDataTypeImpl extends GenericDataType implements C
}
/**
* This method throws an exception if the indicated data type is an ancestor
* of this data type. In other words, the specified data type has a component
* or sub-component containing this data type.
* This method throws an exception if the indicated data type is an ancestor of
* this data type. In other words, the specified data type has a component or
* sub-component containing this data type.
*
* @param dataType the data type
* @throws IllegalArgumentException if the data type is an ancestor of this
* data type.
* @throws IllegalArgumentException if the data type is an ancestor of this data
* type.
*/
protected void checkAncestry(DataType dataType) {
// TODO: cyclic checks are easily bypassed by renaming multiple composite instances
protected void checkAncestry(DataType dataType) throws IllegalArgumentException {
if (this.equals(dataType)) {
throw new IllegalArgumentException(
"Data type " + getDisplayName() + " can't contain itself.");
@ -118,8 +128,9 @@ public abstract class CompositeDataTypeImpl extends GenericDataType implements C
}
/**
* This method throws an exception if the indicated data type is not
* a valid data type for a component of this composite data type.
* This method throws an exception if the indicated data type is not a valid
* data type for a component of this composite data type.
*
* @param dataType the data type to be checked.
* @throws IllegalArgumentException if the data type is invalid.
*/
@ -143,11 +154,12 @@ public abstract class CompositeDataTypeImpl extends GenericDataType implements C
/**
* Handle replacement of datatype which may impact bitfield datatype.
*
* @param bitfieldComponent bitfield component
* @param oldDt affected datatype which has been removed or replaced
* @param newDt replacement datatype
* @param oldDt affected datatype which has been removed or replaced
* @param newDt replacement datatype
* @return true if bitfield component was modified
* @throws InvalidDataTypeException if new datatype is not
* @throws InvalidDataTypeException if new datatype is not
*/
protected boolean updateBitFieldDataType(DataTypeComponentImpl bitfieldComponent,
DataType oldDt, DataType newDt) throws InvalidDataTypeException {
@ -323,7 +335,8 @@ public abstract class CompositeDataTypeImpl extends GenericDataType implements C
}
/**
* Notify any parent data types that this composite data type's alignment has changed.
* Notify any parent data types that this composite data type's alignment has
* changed.
*/
protected void notifyAlignmentChanged() {
DataType[] parents = getParents();
@ -336,10 +349,11 @@ public abstract class CompositeDataTypeImpl extends GenericDataType implements C
}
/**
* Adjusts the internal alignment of components within this composite based on the current
* settings of the internal alignment, packing, alignment type and minimum alignment value.
* This method should be called whenever any of the above settings are changed or whenever
* a components data type is changed or a component is added or removed.
* Adjusts the internal alignment of components within this composite based on
* the current settings of the internal alignment, packing, alignment type and
* minimum alignment value. This method should be called whenever any of the
* above settings are changed or whenever a components data type is changed or a
* component is added or removed.
*/
protected abstract void adjustInternalAlignment();
@ -373,11 +387,14 @@ public abstract class CompositeDataTypeImpl extends GenericDataType implements C
/**
* Dump all components for use in {@link #toString()} representation.
*
* @param buffer string buffer
* @param pad padding to be used with each component output line
* @param pad padding to be used with each component output line
*/
protected void dumpComponents(StringBuilder buffer, String pad) {
for (DataTypeComponent dtc : getComponents()) {
// limit output of filler components for unaligned structures
DataTypeComponent[] components = getDefinedComponents();
for (DataTypeComponent dtc : components) {
DataType dataType = dtc.getDataType();
buffer.append(pad + dtc.getOffset());
buffer.append(pad + dataType.getName());

View File

@ -558,14 +558,7 @@ public class DataOrganizationImpl implements DataOrganization {
// Check each component and get the least common multiple of their forced minimum alignments.
int componentForcedLCM = 0;
DataTypeComponent[] dataTypeComponents;
if (composite instanceof Structure) {
dataTypeComponents = ((Structure) composite).getDefinedComponents();
}
else {
dataTypeComponents = composite.getComponents();
}
for (DataTypeComponent dataTypeComponent : dataTypeComponents) {
for (DataTypeComponent dataTypeComponent : composite.getDefinedComponents()) {
if (dataTypeComponent.isBitFieldComponent()) {
continue;
}

View File

@ -17,7 +17,6 @@ package ghidra.program.model.data;
import java.net.URL;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.Settings;
import ghidra.docking.settings.SettingsDefinition;
import ghidra.program.model.mem.MemBuffer;
@ -261,6 +260,9 @@ public interface DataType {
/**
* Returns true if the given dataType is equivalent to this dataType. The
* precise meaning of "equivalent" is dataType dependent.
* <br>
* NOTE: if invoked by a DB object or manager it should be invoked on the
* DataTypeDB object passing the other datatype as the argument.
* @param dt the dataType being tested for equivalence.
* @return true if the given dataType is equivalent to this dataType.
*/

View File

@ -15,8 +15,6 @@
*/
package ghidra.program.model.data;
import java.util.*;
import ghidra.util.Msg;
public abstract class DataTypeConflictHandler {
@ -61,13 +59,6 @@ public abstract class DataTypeConflictHandler {
RENAME_AND_ADD, USE_EXISTING, REPLACE_EXISTING;
}
/**
* Due to the locking concerns which can arise with a DataTypeConflictHandler,
* definition of new implementations must be done here.
*/
private DataTypeConflictHandler() {
}
public final static DataTypeConflictHandler DEFAULT_HANDLER = new DataTypeConflictHandler() {
@Override
public ConflictResult resolveConflict(DataType addedDataType, DataType existingDataType) {
@ -168,281 +159,52 @@ public abstract class DataTypeConflictHandler {
};
/**
* This {@link DataTypeConflictHandler conflict handler} attempts to match conflicting
* {@link Composite composite data types} (structure or union) when they have compatible
* data layouts. (Data types that are exactly equiv will not be subjected to conflict
* handling and will never reach here)
* <p>
* A default/empty sized structure, or structures with the same size are candidates
* for matching.
* <p>
* Structures that have a subset of the other's field definition are candidates for matching.
* <p>
* When a candidate data type is matched with an existing data type, this conflict handler
* will specify that the new data type is:<p>
* <ul>
* <li>discarded and replaced by the existing data type ({@link ConflictResult#USE_EXISTING})
* <li>used to overwrite the existing data type ({@link ConflictResult#REPLACE_EXISTING})
* </ul>
* or the candidate data type was <b>NOT</b> matched with an existing data type, and the new data type is:<p>
* <ul>
* <li>kept, but renamed with a .conflictNNNN suffix to make it unique ({@link ConflictResult#RENAME_AND_ADD})
* </ul>
* <b>NOTE:</b> structures with alignment (instead of being statically laid out) are not
* treated specially and will not match other aligned or non-aligned structures.
*
* This {@link DataTypeConflictHandler conflict handler} behaves similarly to
* the {@link #DEFAULT_HANDLER}, with the difference being that an
* empty composite (see {@link Composite#isNotYetDefined()}) will be
* replaced by a similar non-empty composite type. Alignment (e.g., packing)
* is not considered when determining conflict resolution.
* <br>
* For datatypes originating from a source archive with matching ID, the
* replacement strategy will utilize the implementation with the
* latest timestamp.
* <br>
* Unlike the {@link #DEFAULT_HANDLER}, follow-on dependency datatype
* resolutions will retain the same conflict resolution strategy.
*/
public final static DataTypeConflictHandler REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER =
new DataTypeConflictHandler() {
/**
* Returns true if src can overwrite the target composite based on size
* @param src the new composite being added
* @param target the existing composite that may be overwritten
* @return true if the sizes are compatible for replacement
*/
private boolean isSizeCompatible(Composite src, Composite target) {
return (target.getLength() <= 1) || (src.getLength() == target.getLength());
}
/**
* Returns true if the {@link Composite composite} is empty (works around
* {@link Composite#getLength()} reporting a non-zero length for empty composites).
* @param composite composite to check
* @return true if the composite is empty
*/
private boolean isCompositeEmpty(Composite composite) {
return composite.getLength() <= 1 && composite.getNumComponents() == 0;
}
/**
* Determines if the given composite is filled with default values (all components are default).
* @param composite composite to check
* @return true if default and false otherwise
*/
private boolean isCompositeDefault(Composite composite) {
if (composite.getLength() == composite.getNumComponents()) {
DataTypeComponent[] comps = composite.getComponents();
boolean isDefault = true;
for (int i = 0; i < comps.length; i++) {
if (comps[i].getDataType() != DataType.DEFAULT) {
isDefault = false;
break;
}
}
if (isDefault) {
return true;
}
}
return false;
}
private boolean isCompositePart(Composite full, Composite part,
Map<DataType, DataType> visitedDataTypes) {
if (full instanceof Structure && part instanceof Structure) {
return isStructurePart((Structure) full, (Structure) part, visitedDataTypes);
}
else if (full instanceof Union && part instanceof Union) {
return isUnionPart((Union) full, (Union) part, visitedDataTypes);
}
else {
return false;
}
}
/**
* Returns true if one union is a subset of another union.
* <p>
* Each component of the candidate partial union must be present in the
* 'full' union and must be equivalent.
* <p>
* Order of components is ignored, except for unnamed components, which receive
* a default name created using their ordinal position.
*
* @param full {@link Union} datatype that is expected to be a superset of the next param.
* @param part {@link Union} datatype that is expected to be a subset of the previous param.
* @param visitedDataTypes identity map of datatypes to prevent loops.
* @return true if part is a subset (or equal) to full.
*/
private boolean isUnionPart(Union full, Union part,
Map<DataType, DataType> visitedDataTypes) {
if (full.getLength() < part.getLength()) {
return false;
}
Map<String, DataTypeComponent> fullComponentsByName = new HashMap<>();
for (DataTypeComponent dtc : full.getComponents()) {
String name = dtc.getFieldName();
if (name == null) {
name = dtc.getDefaultFieldName();
}
fullComponentsByName.put(name, dtc);
}
for (DataTypeComponent dtc : part.getComponents()) {
String name = dtc.getFieldName();
if (name == null) {
name = dtc.getDefaultFieldName();
}
DataTypeComponent fullDTC = fullComponentsByName.get(name);
if (fullDTC == null) {
return false;
}
DataType partDT = dtc.getDataType();
DataType fullDT = fullDTC.getDataType();
if (doRelaxedCompare(partDT, fullDT,
visitedDataTypes) == ConflictResult.RENAME_AND_ADD) {
return false;
}
}
return true;
}
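A minimal sketch of the name-keyed subset test described above, using plain name-to-type maps instead of real DataTypeComponents (illustrative only; assumes java.util.Map is imported):
// Each field of the candidate partial union must appear in the full union
// under the same (possibly default) name with an equivalent type.
static boolean isSubsetByName(Map<String, DataType> full, Map<String, DataType> part) {
	for (Map.Entry<String, DataType> entry : part.entrySet()) {
		DataType fullDt = full.get(entry.getKey());
		if (fullDt == null || !fullDt.isEquivalent(entry.getValue())) {
			return false; // field missing or type not equivalent
		}
	}
	return true;
}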
/*
* Returns true if one structure is a partial definition of another structure.
* <p>
* Each defined component in the candidate partial structure must be present
* in the 'full' structure and must be equiv.
* <p>
* The order and sparseness of the candidate partial structure is not important,
* only that all of its defined components are present in the full structure.
* <p>
*/
private boolean isStructurePart(Structure full, Structure part,
Map<DataType, DataType> visitedDataTypes) {
// Both structures should be equal in length
if (full.getLength() != part.getLength()) {
return false;
}
boolean[] fullCompsUsedFlag = new boolean[full.getComponents().length];
DataTypeComponent[] partComps = part.getDefinedComponents();
// Find a match in the full structure's component list for each
// component in the partial structure.
// Use resolveConflict() == USE_EXISTING to test for equivalence in addition to
// isEquivalent().
// Ensure that two components in the partial struct don't map to the same
// component in the full structure.
for (int i = 0; i < partComps.length; i++) {
DataTypeComponent partDTC = partComps[i];
DataTypeComponent fullDTCAt = full.getComponentAt(partDTC.getOffset());
int fullOrd = fullDTCAt.getOrdinal();
if (fullCompsUsedFlag[fullOrd]) {
return false;
}
DataType partDT = partDTC.getDataType();
DataType fullDT = fullDTCAt.getDataType();
if (doRelaxedCompare(partDT, fullDT,
visitedDataTypes) == ConflictResult.RENAME_AND_ADD) {
return false;
}
fullCompsUsedFlag[fullOrd] = true;
}
return true;
}
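The rule above that no two partial components may map to the same full component can also be expressed with a set of claimed ordinals; a hedged fragment (assumes java.util.Set and HashSet, and is not the code from this commit):
// Fragment: track which ordinals of the full structure have been claimed.
Set<Integer> usedOrdinals = new HashSet<>();
for (DataTypeComponent partDtc : part.getDefinedComponents()) {
	DataTypeComponent fullDtc = full.getComponentAt(partDtc.getOffset());
	if (fullDtc == null || !usedOrdinals.add(fullDtc.getOrdinal())) {
		return false; // no component at that offset, or already matched
	}
}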
/*
* Strict compare will compare its parameters.
* The contents of these datatypes (i.e. contents of structs, pointers, arrays)
* will be compared with relaxed typedef checking.
*/
private ConflictResult doStrictCompare(DataType addedDataType,
DataType existingDataType, Map<DataType, DataType> visitedDataTypes) {
visitedDataTypes.put(existingDataType, addedDataType);
if (existingDataType.isEquivalent(addedDataType)) {
private ConflictResult resolveConflictReplaceEmpty(DataType addedDataType,
DataType existingDataType) {
if (addedDataType.isNotYetDefined()) {
return ConflictResult.USE_EXISTING;
}
else if (existingDataType instanceof Composite &&
addedDataType instanceof Composite) {
Composite existingComposite = (Composite) existingDataType;
Composite addedComposite = (Composite) addedDataType;
// Check to see if we are adding a default/empty data type
if ((isCompositeEmpty(addedComposite) || isCompositeDefault(addedComposite)) &&
isSizeCompatible(existingComposite, addedComposite)) {
return ConflictResult.USE_EXISTING;
}
// Check to see if the existing type is a default/empty data type
if ((isCompositeEmpty(existingComposite) ||
isCompositeDefault(existingComposite)) &&
isSizeCompatible(addedComposite, existingComposite)) {
return ConflictResult.REPLACE_EXISTING;
}
// Check to see if the added type is part of the existing type first to
// generate more USE_EXISTINGS when possible.
if (isCompositePart(existingComposite, addedComposite, visitedDataTypes)) {
return ConflictResult.USE_EXISTING;
}
// Check to see if the existing type is a part of the added type
if (isCompositePart(addedComposite, existingComposite, visitedDataTypes)) {
return ConflictResult.REPLACE_EXISTING;
}
if (existingDataType.isNotYetDefined()) {
return ConflictResult.REPLACE_EXISTING;
}
else if (existingDataType instanceof TypeDef && addedDataType instanceof TypeDef) {
TypeDef addedTypeDef = (TypeDef) addedDataType;
TypeDef existingTypeDef = (TypeDef) existingDataType;
return doRelaxedCompare(addedTypeDef.getBaseDataType(),
existingTypeDef.getBaseDataType(), visitedDataTypes);
}
else if (existingDataType instanceof Array && addedDataType instanceof Array) {
Array addedArray = (Array) addedDataType;
Array existingArray = (Array) existingDataType;
if (addedArray.getNumElements() != existingArray.getNumElements() ||
addedArray.getElementLength() != existingArray.getElementLength()) {
return ConflictResult.RENAME_AND_ADD;
}
return doRelaxedCompare(addedArray.getDataType(), existingArray.getDataType(),
visitedDataTypes);
}
return ConflictResult.RENAME_AND_ADD;
}
/*
* Relaxed compare will take liberties in skipping typedefs to try to compare
* the types that the typedefs are hiding. This is useful when comparing types
* that were embedded in differently compiled files, where you might end up with
* a raw basetype in one file and a typedef to a basetype in another file.
*/
private ConflictResult doRelaxedCompare(DataType addedDataType,
DataType existingDataType, Map<DataType, DataType> visitedDataTypes) {
if (existingDataType instanceof Pointer && addedDataType instanceof Pointer) {
DataType ptrAddedDataType = ((Pointer) addedDataType).getDataType();
DataType ptrExistingDataType = ((Pointer) existingDataType).getDataType();
// Only descend into the pointed-to type if we haven't looked at it before;
// without this check a stack overflow occurs when a struct contains a
// pointer to its own type.
if (!visitedDataTypes.containsKey(ptrExistingDataType)) {
visitedDataTypes.put(ptrExistingDataType, ptrAddedDataType);
addedDataType = ptrAddedDataType;
existingDataType = ptrExistingDataType;
}
}
// unwrap typedefs, possibly asymmetrically (i.e. only unwrap added vs. existing)
if (addedDataType instanceof TypeDef) {
addedDataType = ((TypeDef) addedDataType).getBaseDataType();
}
if (existingDataType instanceof TypeDef) {
existingDataType = ((TypeDef) existingDataType).getBaseDataType();
}
return doStrictCompare(addedDataType, existingDataType, visitedDataTypes);
}
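For comparison with the single-level unwrap performed in doRelaxedCompare above, a standalone helper that strips all typedef layers might look like this (a sketch, not part of this commit):
// Strips every typedef layer in one call; the handler above unwraps only
// one layer per pass and recurses via doStrictCompare instead.
static DataType unwrapTypedefs(DataType dt) {
	while (dt instanceof TypeDef) {
		dt = ((TypeDef) dt).getBaseDataType();
	}
	return dt;
}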
@Override
public ConflictResult resolveConflict(DataType addedDataType,
DataType existingDataType) {
IdentityHashMap<DataType, DataType> visitedDataTypes = new IdentityHashMap<>();
return doStrictCompare(addedDataType, existingDataType, visitedDataTypes);
if (addedDataType instanceof Structure) {
if (existingDataType instanceof Structure) {
return resolveConflictReplaceEmpty(addedDataType, existingDataType);
}
}
else if (addedDataType instanceof Union) {
if (existingDataType instanceof Union) {
return resolveConflictReplaceEmpty(addedDataType, existingDataType);
}
}
return ConflictResult.RENAME_AND_ADD;
}
@Override
public boolean shouldUpdate(DataType sourceDataType, DataType localDataType) {
return false;
return sourceDataType.getLastChangeTime() > localDataType.getLastChangeTime();
}
@Override

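A hedged usage sketch for the handler defined above; 'program' and 'incomingStruct' are assumptions for illustration, not names from this commit:
// Hypothetical resolve: an existing empty/placeholder composite is replaced
// rather than producing a renamed .conflict copy.
DataTypeManager dtm = program.getDataTypeManager();
int txId = dtm.startTransaction("Resolve incoming composite");
try {
	DataType resolved = dtm.resolve(incomingStruct,
		DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER);
}
finally {
	dtm.endTransaction(txId, true);
}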
View File

@ -21,7 +21,6 @@ import java.util.Iterator;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.*;
import ghidra.util.*;

View File

@ -17,9 +17,9 @@ package ghidra.program.model.data;
import java.util.*;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.util.InvalidNameException;
import ghidra.util.UniversalID;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;
/**
@ -97,6 +97,18 @@ public interface DataTypeManager {
*/
public DataType addDataType(DataType dataType, DataTypeConflictHandler handler);
/**
* Sequentially adds a collection of datatypes to this data manager.
* This method provides the added benefit of equivalence caching
* for improved performance.
* @param dataTypes collection of datatypes
* @param handler conflict handler
* @param monitor task monitor
* @throws CancelledException if monitor is cancelled
*/
public void addDataTypes(Collection<DataType> dataTypes, DataTypeConflictHandler handler,
TaskMonitor monitor) throws CancelledException;
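A hedged usage sketch for the bulk-add method declared above; 'dtm' and the populated list are assumptions:
// Hypothetical bulk add: the collection form lets the manager reuse its
// equivalence cache across the whole batch instead of per-type resolves.
List<DataType> parsedTypes = new ArrayList<>(); // populated elsewhere (assumed)
try {
	dtm.addDataTypes(parsedTypes, DataTypeConflictHandler.DEFAULT_HANDLER,
		TaskMonitor.DUMMY); // dummy monitor assumed available in this Ghidra version
}
catch (CancelledException e) {
	// not expected with a non-cancellable monitor
}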
/**
* Returns an iterator over all the dataTypes in this manager
* @return an iterator over all the dataTypes in this manager

View File

@ -15,8 +15,6 @@
*/
package ghidra.program.model.data;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
/**
* The listener interface for notification of changes to a DataTypeManager
*/

View File

@ -15,8 +15,6 @@
*/
package ghidra.program.model.data;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
/**
* Adapter for a Category change listener.
*/

View File

@ -15,7 +15,6 @@
*/
package ghidra.program.model.data;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.util.datastruct.WeakDataStructureFactory;
import ghidra.util.datastruct.WeakSet;

View File

@ -19,7 +19,6 @@ import java.io.IOException;
import java.io.Writer;
import java.util.*;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.util.Msg;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;
@ -352,7 +351,7 @@ public class DataTypeWriter {
}
private boolean containsComposite(Composite container, Composite contained) {
for (DataTypeComponent component : container.getComponents()) {
for (DataTypeComponent component : container.getDefinedComponents()) {
DataType dt = getBaseArrayTypedefType(component.getDataType());
if (dt instanceof Composite && dt.getName().equals(contained.getName()) &&
dt.isEquivalent(contained)) {

View File

@ -18,7 +18,6 @@ package ghidra.program.model.data;
import java.math.BigInteger;
import java.util.*;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.Settings;
import ghidra.docking.settings.SettingsDefinition;
import ghidra.program.database.data.DataTypeUtilities;

View File

@ -17,7 +17,6 @@ package ghidra.program.model.data;
import java.util.ArrayList;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.docking.settings.Settings;
import ghidra.program.database.data.DataTypeUtilities;
import ghidra.program.model.lang.PrototypeModel;
@ -304,10 +303,10 @@ public class FunctionDefinitionDataType extends GenericDataType implements Funct
if (dt == this) {
return true;
}
if (!(dt instanceof FunctionSignature)) {
if (!(dt instanceof FunctionDefinition)) {
return false;
}
return isEquivalentSignature((FunctionSignature) dt);
return isEquivalentSignature((FunctionDefinition) dt);
}
@Override
@ -409,4 +408,9 @@ public class FunctionDefinitionDataType extends GenericDataType implements Funct
return false;
}
@Override
public String toString() {
return getPrototypeString(true);
}
}

View File

@ -15,7 +15,6 @@
*/
package ghidra.program.model.data;
import ghidra.app.plugin.core.datamgr.archive.SourceArchive;
import ghidra.util.InvalidNameException;
import ghidra.util.UniversalID;
import ghidra.util.exception.DuplicateNameException;

Some files were not shown because too many files have changed in this diff.