Mirror of https://github.com/NationalSecurityAgency/ghidra.git (synced 2024-11-24 13:11:47 +00:00)

GP-3839: Various speed improvements for Trace RMI

Commit: b34aaa4952
Parent: bc24351495
@ -367,7 +367,7 @@ def refresh_modules(node: sch.Schema('ModuleContainer')):
# node is Module so this appears in Modules panel
@REGISTRY.method(display='Load all Modules and all Sections')
@REGISTRY.method(display='Refresh all Modules and all Sections')
def load_all_sections(node: sch.Schema('Module')):
"""
Load/refresh all modules and all sections.
@ -51,8 +51,7 @@ public interface LocationTracker {
* @param coordinates the trace, thread, snap, etc., of the tool
* @return the address to navigate to
*/
CompletableFuture<Address> computeTraceAddress(PluginTool tool,
DebuggerCoordinates coordinates);
Address computeTraceAddress(PluginTool tool, DebuggerCoordinates coordinates);
/**
* Get the suggested input if the user activates "Go To" while this tracker is active
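Editor's note: the change above is the heart of this commit. LocationTracker.computeTraceAddress now returns the Address directly instead of a CompletableFuture. A minimal, self-contained sketch of what that means for a caller, using plain Java stand-ins rather than the Ghidra API:

	import java.util.concurrent.CompletableFuture;

	public class SyncVsAsyncDemo {
		static Integer compute() {
			return 42; // stands in for an address computation
		}

		public static void main(String[] args) {
			// Old shape: the value is wrapped and consumed via a callback
			CompletableFuture.supplyAsync(SyncVsAsyncDemo::compute)
					.thenAccept(v -> System.out.println("async result: " + v))
					.join();

			// New shape: the caller gets the value inline and handles null directly
			Integer v = compute();
			if (v != null) {
				System.out.println("sync result: " + v);
			}
		}
	}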
@ -484,10 +484,10 @@ public class TraceRmiHandler implements TraceRmiConnection {
RootMessage.Builder dispatch(RootMessage req, RootMessage.Builder rep) throws Exception;
default RootMessage handle(RootMessage req) {
/*String desc = toString(req);
String desc = toString(req);
if (desc != null) {
TimedMsg.debug(this, "HANDLING: " + desc);
}*/
}
RootMessage.Builder rep = RootMessage.newBuilder();
try {
rep = dispatch(req, rep);
@ -514,12 +514,12 @@ public class TraceRmiHandler implements TraceRmiConnection {
case REQUEST_START_TX -> "startTx(%d,%s)".formatted(
req.getRequestStartTx().getTxid().getId(),
req.getRequestStartTx().getDescription());
case REQUEST_SET_VALUE -> "setValue(%d,%s,%s,=%s)".formatted(
/*case REQUEST_SET_VALUE -> "setValue(%d,%s,%s,=%s)".formatted(
req.getRequestSetValue().getValue().getParent().getId(),
req.getRequestSetValue().getValue().getParent().getPath().getPath(),
req.getRequestSetValue().getValue().getKey(),
ValueDecoder.DISPLAY
.toValue(req.getRequestSetValue().getValue().getValue()));
.toValue(req.getRequestSetValue().getValue().getValue()));*/
default -> null;
};
}
@ -62,6 +62,7 @@ import ghidra.util.exception.CancelledException;
import resources.MultiIcon;
public interface DebuggerResources {
String OPTIONS_CATEGORY_DEBUGGER = "Debugger";
String OPTIONS_CATEGORY_WORKFLOW = "Workflow";
@ -19,7 +19,6 @@ import java.awt.Color;
import java.lang.invoke.MethodHandles;
import java.math.BigInteger;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import docking.ActionContext;
import docking.ComponentProvider;

@ -34,13 +33,13 @@ import ghidra.app.plugin.core.debug.gui.colors.MultiSelectionBlendedLayoutBackgr
import ghidra.app.plugin.core.debug.gui.listing.DebuggerTrackedRegisterListingBackgroundColorModel;
import ghidra.app.util.viewer.listingpanel.ListingBackgroundColorModel;
import ghidra.app.util.viewer.listingpanel.ListingPanel;
import ghidra.async.AsyncUtils;
import ghidra.debug.api.action.*;
import ghidra.debug.api.action.LocationTrackingSpec.TrackingSpecConfigFieldCodec;
import ghidra.debug.api.tracemgr.DebuggerCoordinates;
import ghidra.framework.options.SaveState;
import ghidra.framework.plugintool.*;
import ghidra.framework.plugintool.annotation.AutoConfigStateField;
import ghidra.program.model.address.Address;
import ghidra.program.util.ProgramLocation;
import ghidra.trace.model.*;
import ghidra.trace.model.stack.TraceStack;

@ -252,7 +251,7 @@ public class DebuggerTrackLocationTrait {
doTrack();
}
protected CompletableFuture<ProgramLocation> computeTrackedLocation() {
protected ProgramLocation computeTrackedLocation() {
// Change of register values (for current frame)
// Change of stack pc (for current frame)
// Change of current view (if not caused by goTo)

@ -262,16 +261,18 @@ public class DebuggerTrackLocationTrait {
// Change of tracking settings
DebuggerCoordinates cur = current;
if (cur.getView() == null) {
return AsyncUtils.nil();
return null;
}
TraceThread thread = cur.getThread();
if (thread == null || spec == null) {
return AsyncUtils.nil();
return null;
}
// NB: view's snap may be forked for emulation
return tracker.computeTraceAddress(tool, cur).thenApply(address -> {
return address == null ? null : new ProgramLocation(cur.getView(), address);
});
Address address = tracker.computeTraceAddress(tool, cur);
if (address == null) {
return null;
}
return new ProgramLocation(cur.getView(), address);
}
public String computeLabelText() {

@ -282,13 +283,13 @@ public class DebuggerTrackLocationTrait {
}
protected void doTrack() {
computeTrackedLocation().thenAccept(loc -> {
trackedLocation = loc;
try {
trackedLocation = computeTrackedLocation();
locationTracked();
}).exceptionally(ex -> {
}
catch (Throwable ex) {
Msg.error(this, "Error while computing location: " + ex);
return null;
});
}
}
protected void addNewListeners() {
@ -15,12 +15,9 @@
*/
package ghidra.app.plugin.core.debug.gui.action;
import java.util.concurrent.CompletableFuture;
import javax.swing.Icon;
import ghidra.app.plugin.core.debug.gui.DebuggerResources.TrackLocationAction;
import ghidra.async.AsyncUtils;
import ghidra.debug.api.action.*;
import ghidra.debug.api.tracemgr.DebuggerCoordinates;
import ghidra.framework.plugintool.PluginTool;

@ -66,9 +63,8 @@ public enum NoneLocationTrackingSpec implements LocationTrackingSpec, LocationTr
}
@Override
public CompletableFuture<Address> computeTraceAddress(PluginTool tool,
DebuggerCoordinates coordinates) {
return AsyncUtils.nil();
public Address computeTraceAddress(PluginTool tool, DebuggerCoordinates coordinates) {
return null;
}
@Override
@ -15,8 +15,6 @@
*/
package ghidra.app.plugin.core.debug.gui.action;
import java.util.concurrent.CompletableFuture;
import javax.swing.Icon;
import ghidra.app.plugin.core.debug.gui.DebuggerResources.TrackLocationAction;

@ -67,7 +65,8 @@ public enum PCByStackLocationTrackingSpec implements LocationTrackingSpec, Locat
return this;
}
public Address doComputeTraceAddress(PluginTool tool, DebuggerCoordinates coordinates) {
@Override
public Address computeTraceAddress(PluginTool tool, DebuggerCoordinates coordinates) {
Trace trace = coordinates.getTrace();
TraceThread thread = coordinates.getThread();
long snap = coordinates.getSnap();

@ -83,16 +82,10 @@ public enum PCByStackLocationTrackingSpec implements LocationTrackingSpec, Locat
return frame.getProgramCounter(snap);
}
@Override
public CompletableFuture<Address> computeTraceAddress(PluginTool tool,
DebuggerCoordinates coordinates) {
return CompletableFuture.supplyAsync(() -> doComputeTraceAddress(tool, coordinates));
}
@Override
public GoToInput getDefaultGoToInput(PluginTool tool, DebuggerCoordinates coordinates,
ProgramLocation location) {
Address address = doComputeTraceAddress(tool, coordinates);
Address address = computeTraceAddress(tool, coordinates);
if (address == null) {
return NoneLocationTrackingSpec.INSTANCE.getDefaultGoToInput(tool, coordinates,
location);
@ -15,8 +15,6 @@
*/
package ghidra.app.plugin.core.debug.gui.action;
import java.util.concurrent.CompletableFuture;
import javax.swing.Icon;
import ghidra.app.plugin.core.debug.gui.DebuggerResources.TrackLocationAction;

@ -70,17 +68,14 @@ public enum PCLocationTrackingSpec implements LocationTrackingSpec, LocationTrac
}
@Override
public CompletableFuture<Address> computeTraceAddress(PluginTool tool,
DebuggerCoordinates coordinates) {
return CompletableFuture.supplyAsync(() -> {
if (coordinates.getTime().isSnapOnly()) {
Address pc = BY_STACK.doComputeTraceAddress(tool, coordinates);
if (pc != null) {
return pc;
}
public Address computeTraceAddress(PluginTool tool, DebuggerCoordinates coordinates) {
if (coordinates.getTime().isSnapOnly()) {
Address pc = BY_STACK.computeTraceAddress(tool, coordinates);
if (pc != null) {
return pc;
}
return BY_REG.doComputeTraceAddress(tool, coordinates);
});
}
return BY_REG.computeTraceAddress(tool, coordinates);
}
@Override
@ -15,8 +15,6 @@
*/
package ghidra.app.plugin.core.debug.gui.action;
import java.util.concurrent.CompletableFuture;
import ghidra.debug.api.action.*;
import ghidra.debug.api.tracemgr.DebuggerCoordinates;
import ghidra.framework.plugintool.PluginTool;

@ -52,7 +50,8 @@ public interface RegisterLocationTrackingSpec extends LocationTrackingSpec, Loca
return this;
}
default Address doComputeTraceAddress(PluginTool tool, DebuggerCoordinates coordinates) {
@Override
default Address computeTraceAddress(PluginTool tool, DebuggerCoordinates coordinates) {
Trace trace = coordinates.getTrace();
TracePlatform platform = coordinates.getPlatform();
TraceThread thread = coordinates.getThread();

@ -88,12 +87,6 @@ public interface RegisterLocationTrackingSpec extends LocationTrackingSpec, Loca
.getAddress(value.getUnsignedValue().longValue(), true));
}
@Override
default CompletableFuture<Address> computeTraceAddress(PluginTool tool,
DebuggerCoordinates coordinates) {
return CompletableFuture.supplyAsync(() -> doComputeTraceAddress(tool, coordinates));
}
@Override
default GoToInput getDefaultGoToInput(PluginTool tool, DebuggerCoordinates coordinates,
ProgramLocation location) {
@ -16,13 +16,11 @@
package ghidra.app.plugin.core.debug.gui.action;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import javax.swing.Icon;
import ghidra.app.plugin.core.debug.gui.DebuggerResources;
import ghidra.app.plugin.core.debug.gui.DebuggerResources.TrackLocationAction;
import ghidra.async.AsyncUtils;
import ghidra.debug.api.action.*;
import ghidra.debug.api.tracemgr.DebuggerCoordinates;
import ghidra.debug.api.watch.WatchRow;

@ -115,28 +113,25 @@ public class WatchLocationTrackingSpec implements LocationTrackingSpec {
class WatchLocationTracker implements LocationTracker {
private AddressSetView reads;
private DebuggerCoordinates current = DebuggerCoordinates.NOWHERE;
private PcodeExecutor<WatchValue> asyncExec = null;
private PcodeExecutor<WatchValue> exec = null;
private PcodeExpression compiled;
@Override
public CompletableFuture<Address> computeTraceAddress(PluginTool tool,
DebuggerCoordinates coordinates) {
if (!Objects.equals(current, coordinates) || asyncExec == null) {
public Address computeTraceAddress(PluginTool tool, DebuggerCoordinates coordinates) {
if (!Objects.equals(current, coordinates) || exec == null) {
current = coordinates;
asyncExec = current.getPlatform() == null ? null
exec = current.getPlatform() == null ? null
: DebuggerPcodeUtils.buildWatchExecutor(tool, coordinates);
}
else {
asyncExec.getState().clear();
exec.getState().clear();
}
if (current.getTrace() == null) {
return AsyncUtils.nil();
return null;
}
return CompletableFuture.supplyAsync(() -> {
compiled = DebuggerPcodeUtils.compileExpression(tool, current, expression);
WatchValue value = compiled.evaluate(asyncExec);
return value == null ? null : value.address();
});
compiled = DebuggerPcodeUtils.compileExpression(tool, current, expression);
WatchValue value = compiled.evaluate(exec);
return value == null ? null : value.address();
}
@Override
@ -20,7 +20,6 @@ import ghidra.app.plugin.core.debug.AbstractDebuggerPlugin;
import ghidra.app.plugin.core.debug.DebuggerPluginPackage;
import ghidra.app.plugin.core.debug.event.*;
import ghidra.app.services.DebuggerLogicalBreakpointService;
import ghidra.app.services.DebuggerModelService;
import ghidra.framework.plugintool.*;
import ghidra.framework.plugintool.util.PluginStatus;

@ -259,48 +259,40 @@ public class DebuggerListingPlugin extends AbstractCodeBrowserPlugin<DebuggerLis
@Override
public void processEvent(PluginEvent event) {
if (event instanceof ProgramLocationPluginEvent) {
if (event instanceof ProgramLocationPluginEvent ev) {
cbProgramLocationEvents.invoke(() -> {
ProgramLocationPluginEvent ev = (ProgramLocationPluginEvent) event;
if (heedLocationEvent(ev)) {
connectedProvider.staticProgramLocationChanged(ev.getLocation());
}
});
}
if (event instanceof ProgramSelectionPluginEvent) {
if (event instanceof ProgramSelectionPluginEvent ev) {
cbProgramSelectionEvents.invoke(() -> {
ProgramSelectionPluginEvent ev = (ProgramSelectionPluginEvent) event;
if (heedSelectionEvent(ev)) {
connectedProvider.staticProgramSelectionChanged(ev.getProgram(),
ev.getSelection());
}
});
}
if (event instanceof ProgramOpenedPluginEvent) {
ProgramOpenedPluginEvent ev = (ProgramOpenedPluginEvent) event;
if (event instanceof ProgramOpenedPluginEvent ev) {
allProviders(p -> p.programOpened(ev.getProgram()));
}
if (event instanceof ProgramClosedPluginEvent) {
ProgramClosedPluginEvent ev = (ProgramClosedPluginEvent) event;
if (event instanceof ProgramClosedPluginEvent ev) {
allProviders(p -> p.programClosed(ev.getProgram()));
}
if (event instanceof ProgramActivatedPluginEvent) {
ProgramActivatedPluginEvent ev = (ProgramActivatedPluginEvent) event;
if (event instanceof ProgramActivatedPluginEvent ev) {
allProviders(p -> p.staticProgramActivated(ev.getActiveProgram()));
}
if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
if (event instanceof TraceActivatedPluginEvent ev) {
current = ev.getActiveCoordinates();
allProviders(p -> p.coordinatesActivated(current));
}
if (event instanceof TraceClosedPluginEvent) {
TraceClosedPluginEvent ev = (TraceClosedPluginEvent) event;
if (event instanceof TraceClosedPluginEvent ev) {
if (current.getTrace() == ev.getTrace()) {
current = DebuggerCoordinates.NOWHERE;
}
allProviders(p -> p.traceClosed(ev.getTrace()));
}
// TODO: Sync selection and highlights?
}
void fireStaticLocationEvent(ProgramLocation staticLoc) {
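Editor's note: most of the plugin changes in this commit repeat the mechanical cleanup seen above, replacing an instanceof test plus explicit cast with Java 16+ pattern matching for instanceof, which declares the narrowed binding in the condition itself. A small self-contained illustration; String is just a stand-in for the PluginEvent subclasses:

	public class InstanceofPatternDemo {
		static void process(Object event) {
			// Old style: test, then cast
			if (event instanceof String) {
				String s = (String) event;
				System.out.println(s.length());
			}
			// New style: the binding variable is introduced by the pattern
			if (event instanceof String s) {
				System.out.println(s.length());
			}
		}

		public static void main(String[] args) {
			process("trace activated");
		}
	}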
@ -1159,24 +1159,22 @@ public class DebuggerListingProvider extends CodeViewerProvider {
}
protected void doGoToTracked() {
ProgramLocation loc = trackingTrait.getTrackedLocation();
ProgramLocation trackedStatic = doMarkTrackedLocation();
if (loc == null) {
return;
}
TraceProgramView curView = current.getView();
if (!syncTrait.isAutoSyncCursorWithStaticListing() || trackedStatic == null) {
Swing.runIfSwingOrRunLater(() -> {
Swing.runIfSwingOrRunLater(() -> {
ProgramLocation loc = trackingTrait.getTrackedLocation();
ProgramLocation trackedStatic = doMarkTrackedLocation();
if (loc == null) {
return;
}
TraceProgramView curView = current.getView();
if (!syncTrait.isAutoSyncCursorWithStaticListing() || trackedStatic == null) {
if (curView != current.getView()) {
// Trace changed before Swing scheduled us
return;
}
goToAndUpdateTrackingLabel(curView, loc);
doCheckCurrentModuleMissing();
});
}
else {
Swing.runIfSwingOrRunLater(() -> {
}
else {
if (curView != current.getView()) {
// Trace changed before Swing scheduled us
return;

@ -1184,8 +1182,8 @@ public class DebuggerListingProvider extends CodeViewerProvider {
goToAndUpdateTrackingLabel(curView, loc);
doCheckCurrentModuleMissing();
plugin.fireStaticLocationEvent(trackedStatic);
});
}
}
});
}
protected void doAutoDisassemble(Address start) {
@ -150,13 +150,11 @@ public class DebuggerMemoryBytesPlugin
@Override
public void processEvent(PluginEvent event) {
if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
if (event instanceof TraceActivatedPluginEvent ev) {
current = ev.getActiveCoordinates();
allProviders(p -> p.coordinatesActivated(current));
}
if (event instanceof TraceClosedPluginEvent) {
TraceClosedPluginEvent ev = (TraceClosedPluginEvent) event;
if (event instanceof TraceClosedPluginEvent ev) {
if (current.getTrace() == ev.getTrace()) {
current = DebuggerCoordinates.NOWHERE;
}
@ -66,20 +66,16 @@ public class DebuggerRegionsPlugin extends AbstractDebuggerPlugin {
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof ProgramActivatedPluginEvent) {
ProgramActivatedPluginEvent ev = (ProgramActivatedPluginEvent) event;
if (event instanceof ProgramActivatedPluginEvent ev) {
provider.setProgram(ev.getActiveProgram());
}
else if (event instanceof ProgramLocationPluginEvent) {
ProgramLocationPluginEvent ev = (ProgramLocationPluginEvent) event;
else if (event instanceof ProgramLocationPluginEvent ev) {
provider.setLocation(ev.getLocation());
}
else if (event instanceof ProgramClosedPluginEvent) {
ProgramClosedPluginEvent ev = (ProgramClosedPluginEvent) event;
else if (event instanceof ProgramClosedPluginEvent ev) {
provider.programClosed(ev.getProgram());
}
else if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
else if (event instanceof TraceActivatedPluginEvent ev) {
provider.coordinatesActivated(ev.getActiveCoordinates());
}
}
@ -26,22 +26,21 @@ import ghidra.framework.plugintool.*;
import ghidra.framework.plugintool.util.PluginStatus;
import ghidra.program.model.listing.Program;
@PluginInfo( //
shortDescription = "Displays memory vs time", //
description = "Provides visualiztion/navigation across time/address axes", //
category = PluginCategoryNames.DEBUGGER, //
packageName = DebuggerPluginPackage.NAME, //
status = PluginStatus.RELEASED, //
eventsConsumed = { //
TraceActivatedPluginEvent.class //
}, //
servicesRequired = { //
DebuggerTraceManagerService.class //
}, //
servicesProvided = { //
MemviewService.class //
} //
)
@PluginInfo(
shortDescription = "Displays memory vs time",
description = "Provides visualiztion/navigation across time/address axes",
category = PluginCategoryNames.DEBUGGER,
packageName = DebuggerPluginPackage.NAME,
status = PluginStatus.RELEASED,
eventsConsumed = {
TraceActivatedPluginEvent.class
},
servicesRequired = {
DebuggerTraceManagerService.class
},
servicesProvided = {
MemviewService.class
})
public class DebuggerMemviewPlugin extends AbstractDebuggerPlugin implements MemviewService {
protected MemviewProvider provider;

@ -67,12 +66,12 @@ public class DebuggerMemviewPlugin extends AbstractDebuggerPlugin implements Mem
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
if (event instanceof TraceActivatedPluginEvent ev) {
listener.coordinatesActivated(ev.getActiveCoordinates());
}
}
@Override
public MemviewProvider getProvider() {
return provider;
}
@ -65,20 +65,16 @@ public class DebuggerModulesPlugin extends AbstractDebuggerPlugin {
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof ProgramActivatedPluginEvent) {
ProgramActivatedPluginEvent ev = (ProgramActivatedPluginEvent) event;
if (event instanceof ProgramActivatedPluginEvent ev) {
provider.setProgram(ev.getActiveProgram());
}
else if (event instanceof ProgramLocationPluginEvent) {
ProgramLocationPluginEvent ev = (ProgramLocationPluginEvent) event;
else if (event instanceof ProgramLocationPluginEvent ev) {
provider.setLocation(ev.getLocation());
}
else if (event instanceof ProgramClosedPluginEvent) {
ProgramClosedPluginEvent ev = (ProgramClosedPluginEvent) event;
else if (event instanceof ProgramClosedPluginEvent ev) {
provider.programClosed(ev.getProgram());
}
else if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
else if (event instanceof TraceActivatedPluginEvent ev) {
provider.coordinatesActivated(ev.getActiveCoordinates());
}
}
@ -64,12 +64,10 @@ public class DebuggerStaticMappingPlugin extends AbstractDebuggerPlugin {
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
if (event instanceof TraceActivatedPluginEvent ev) {
provider.setTrace(ev.getActiveCoordinates().getTrace());
}
if (event instanceof ProgramActivatedPluginEvent) {
ProgramActivatedPluginEvent ev = (ProgramActivatedPluginEvent) event;
if (event instanceof ProgramActivatedPluginEvent ev) {
provider.setProgram(ev.getActiveProgram());
}
}
@ -112,38 +112,32 @@ public class DebuggerObjectsPlugin extends AbstractDebuggerPlugin
provider.traceOpened(ev.getTrace());
}
}
else if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
else if (event instanceof TraceActivatedPluginEvent ev) {
for (DebuggerObjectsProvider provider : providers) {
provider.traceActivated(ev.getActiveCoordinates());
}
}
else if (event instanceof TraceClosedPluginEvent) {
TraceClosedPluginEvent ev = (TraceClosedPluginEvent) event;
else if (event instanceof TraceClosedPluginEvent ev) {
for (DebuggerObjectsProvider provider : providers) {
provider.traceClosed(ev.getTrace());
}
}
else if (event instanceof ModelActivatedPluginEvent) {
ModelActivatedPluginEvent ev = (ModelActivatedPluginEvent) event;
else if (event instanceof ModelActivatedPluginEvent ev) {
for (DebuggerObjectsProvider provider : providers) {
provider.modelActivated(ev.getActiveModel());
}
}
else if (event instanceof ProgramActivatedPluginEvent) {
ProgramActivatedPluginEvent ev = (ProgramActivatedPluginEvent) event;
else if (event instanceof ProgramActivatedPluginEvent ev) {
for (DebuggerObjectsProvider provider : providers) {
provider.setProgram(ev.getActiveProgram());
}
}
else if (event instanceof ProgramOpenedPluginEvent) {
ProgramOpenedPluginEvent ev = (ProgramOpenedPluginEvent) event;
else if (event instanceof ProgramOpenedPluginEvent ev) {
for (DebuggerObjectsProvider provider : providers) {
provider.setProgram(ev.getProgram());
}
}
else if (event instanceof ProgramSelectionPluginEvent) {
ProgramSelectionPluginEvent ev = (ProgramSelectionPluginEvent) event;
else if (event instanceof ProgramSelectionPluginEvent ev) {
for (DebuggerObjectsProvider provider : providers) {
provider.setProgram(ev.getProgram());
}
@ -53,8 +53,7 @@ public class DebuggerPcodeStepperPlugin extends AbstractDebuggerPlugin {
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
if (event instanceof TraceActivatedPluginEvent ev) {
provider.coordinatesActivated(ev.getActiveCoordinates());
}
}
@ -29,7 +29,8 @@ import ghidra.app.plugin.PluginCategoryNames;
import ghidra.app.plugin.core.debug.DebuggerPluginPackage;
import ghidra.app.plugin.core.debug.event.*;
import ghidra.app.plugin.core.debug.gui.DebuggerResources;
import ghidra.app.plugin.core.debug.mapping.*;
import ghidra.app.plugin.core.debug.mapping.DebuggerPlatformOffer;
import ghidra.app.plugin.core.debug.mapping.DebuggerPlatformOpinion;
import ghidra.app.services.DebuggerPlatformService;
import ghidra.app.services.DebuggerTraceManagerService;
import ghidra.debug.api.platform.DebuggerPlatformMapper;

@ -307,14 +308,14 @@ public class DebuggerPlatformPlugin extends Plugin {
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof TraceActivatedPluginEvent evt) {
coordinatesActivated(evt.getActiveCoordinates());
if (event instanceof TraceActivatedPluginEvent ev) {
coordinatesActivated(ev.getActiveCoordinates());
}
if (event instanceof TraceClosedPluginEvent evt) {
traceClosed(evt.getTrace());
if (event instanceof TraceClosedPluginEvent ev) {
traceClosed(ev.getTrace());
}
if (event instanceof DebuggerPlatformPluginEvent evt) {
mapperActivated(evt.getTrace(), evt.getMapper());
if (event instanceof DebuggerPlatformPluginEvent ev) {
mapperActivated(ev.getTrace(), ev.getMapper());
}
}
@ -101,12 +101,10 @@ public class DebuggerRegistersPlugin extends AbstractDebuggerPlugin {
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
if (event instanceof TraceActivatedPluginEvent ev) {
connectedProvider.coordinatesActivated(ev.getActiveCoordinates());
}
if (event instanceof TraceClosedPluginEvent) {
TraceClosedPluginEvent ev = (TraceClosedPluginEvent) event;
if (event instanceof TraceClosedPluginEvent ev) {
traceClosed(ev.getTrace());
}
}
@ -18,7 +18,8 @@ package ghidra.app.plugin.core.debug.gui.thread;
import ghidra.app.plugin.PluginCategoryNames;
import ghidra.app.plugin.core.debug.AbstractDebuggerPlugin;
import ghidra.app.plugin.core.debug.DebuggerPluginPackage;
import ghidra.app.plugin.core.debug.event.*;
import ghidra.app.plugin.core.debug.event.TraceActivatedPluginEvent;
import ghidra.app.plugin.core.debug.event.TraceOpenedPluginEvent;
import ghidra.app.services.DebuggerTraceManagerService;
import ghidra.framework.plugintool.*;
import ghidra.framework.plugintool.util.PluginStatus;

@ -57,8 +58,7 @@ public class DebuggerThreadsPlugin extends AbstractDebuggerPlugin {
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
if (event instanceof TraceActivatedPluginEvent ev) {
provider.coordinatesActivated(ev.getActiveCoordinates());
}
}
@ -21,6 +21,7 @@ import java.util.Collection;
import java.util.Objects;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.swing.*;
import javax.swing.table.*;

@ -141,7 +142,7 @@ public class DebuggerSnapshotTablePanel extends JPanel {
public Component getTableCellRendererComponent(GTableCellRenderingData data) {
super.getTableCellRendererComponent(data);
SnapshotRow row = (SnapshotRow) data.getRowObject();
if (row != null && row.getSnap() == currentSnap) {
if (row != null && currentSnap != null && currentSnap.longValue() == row.getSnap()) {
setBold();
}
return this;

@ -154,7 +155,7 @@ public class DebuggerSnapshotTablePanel extends JPanel {
protected boolean hideScratch = true;
private Trace currentTrace;
private Long currentSnap;
private volatile Long currentSnap;
protected final SnapshotListener listener = new SnapshotListener();

@ -237,8 +238,11 @@ public class DebuggerSnapshotTablePanel extends JPanel {
Collection<? extends TraceSnapshot> snapshots =
hideScratch ? manager.getSnapshots(0, true, Long.MAX_VALUE, true)
: manager.getAllSnapshots();
snapshotTableModel
.addAll(snapshots.stream().map(s -> new SnapshotRow(currentTrace, s)).toList());
// Use .collect instead of .toList to avoid size/sync issues
// Even though access is synchronized, size may change during iteration
snapshotTableModel.addAll(snapshots.stream()
.map(s -> new SnapshotRow(currentTrace, s))
.collect(Collectors.toList()));
}
protected void deleteScratchSnapshots() {

@ -250,10 +254,11 @@ public class DebuggerSnapshotTablePanel extends JPanel {
return;
}
TraceTimeManager manager = currentTrace.getTimeManager();
snapshotTableModel.addAll(manager.getSnapshots(Long.MIN_VALUE, true, 0, false)
.stream()
Collection<? extends TraceSnapshot> sratch =
manager.getSnapshots(Long.MIN_VALUE, true, 0, false);
snapshotTableModel.addAll(sratch.stream()
.map(s -> new SnapshotRow(currentTrace, s))
.toList());
.collect(Collectors.toList()));
}
public ListSelectionModel getSelectionModel() {

@ -265,8 +270,12 @@ public class DebuggerSnapshotTablePanel extends JPanel {
return row == null ? null : row.getSnap();
}
public void setSelectedSnapshot(Long snap) {
public void setCurrentSnapshot(Long snap) {
currentSnap = snap;
snapshotTableModel.fireTableDataChanged();
}
public void setSelectedSnapshot(Long snap) {
if (snap == null) {
snapshotTable.clearSelection();
return;

@ -283,6 +292,5 @@ public class DebuggerSnapshotTablePanel extends JPanel {
return;
}
snapshotFilterPanel.setSelectedItem(row);
snapshotTableModel.fireTableDataChanged();
}
}
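Editor's note: the renderer fix above (currentSnap != null && currentSnap.longValue() == row.getSnap()) guards a comparison that previously could auto-unbox a possibly-null Long; currentSnap is also made volatile because it can now be updated off the painting path. A minimal demonstration of the unboxing hazard, assuming the row's snap is a primitive long as it appears in the diff:

	public class UnboxingNpeDemo {
		public static void main(String[] args) {
			Long currentSnap = null;
			long rowSnap = 3L;

			// Null-safe form used in the fix
			if (currentSnap != null && currentSnap.longValue() == rowSnap) {
				System.out.println("match");
			}

			// Unguarded comparison auto-unboxes currentSnap and throws
			try {
				if (rowSnap == currentSnap) {
					System.out.println("match");
				}
			}
			catch (NullPointerException e) {
				System.out.println("NPE from unboxing a null Long");
			}
		}
	}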
@ -122,8 +122,7 @@ public class DebuggerTimePlugin extends AbstractDebuggerPlugin {
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof TraceActivatedPluginEvent) {
TraceActivatedPluginEvent ev = (TraceActivatedPluginEvent) event;
if (event instanceof TraceActivatedPluginEvent ev) {
provider.coordinatesActivated(ev.getActiveCoordinates());
}
}
@ -209,7 +209,7 @@ public class DebuggerTimeProvider extends ComponentProviderAdapter {
current = coordinates;
mainPanel.setTrace(current.getTrace());
mainPanel.setSelectedSnapshot(current.getSnap());
mainPanel.setCurrentSnapshot(current.getSnap());
}
public void writeConfigState(SaveState saveState) {
@ -1360,23 +1360,23 @@ public class DebuggerLogicalBreakpointServicePlugin extends Plugin
@Override
public void processEvent(PluginEvent event) {
if (event instanceof ProgramOpenedPluginEvent evt) {
programOpened(evt.getProgram());
if (event instanceof ProgramOpenedPluginEvent ev) {
programOpened(ev.getProgram());
}
else if (event instanceof ProgramClosedPluginEvent evt) {
programClosed(evt.getProgram());
else if (event instanceof ProgramClosedPluginEvent ev) {
programClosed(ev.getProgram());
}
else if (event instanceof TraceOpenedPluginEvent evt) {
traceOpened(evt.getTrace());
else if (event instanceof TraceOpenedPluginEvent ev) {
traceOpened(ev.getTrace());
}
else if (event instanceof TraceActivatedPluginEvent evt) {
traceSnapChanged(evt.getActiveCoordinates());
else if (event instanceof TraceActivatedPluginEvent ev) {
traceSnapChanged(ev.getActiveCoordinates());
}
else if (event instanceof TraceInactiveCoordinatesPluginEvent evt) {
traceSnapChanged(evt.getCoordinates());
else if (event instanceof TraceInactiveCoordinatesPluginEvent ev) {
traceSnapChanged(ev.getCoordinates());
}
else if (event instanceof TraceClosedPluginEvent evt) {
traceClosed(evt.getTrace());
else if (event instanceof TraceClosedPluginEvent ev) {
traceClosed(ev.getTrace());
}
}
}
@ -20,7 +20,8 @@ import java.util.*;
import java.util.concurrent.*;
import ghidra.app.plugin.PluginCategoryNames;
import ghidra.app.plugin.core.debug.*;
import ghidra.app.plugin.core.debug.AbstractDebuggerPlugin;
import ghidra.app.plugin.core.debug.DebuggerPluginPackage;
import ghidra.app.plugin.core.debug.event.*;
import ghidra.app.services.*;
import ghidra.app.services.DebuggerTraceManagerService.ActivationCause;

@ -342,14 +343,14 @@ public class DebuggerControlServicePlugin extends AbstractDebuggerPlugin
@Override
public void processEvent(PluginEvent event) {
super.processEvent(event);
if (event instanceof TraceOpenedPluginEvent evt) {
installAllMemoryEditors(evt.getTrace());
if (event instanceof TraceOpenedPluginEvent ev) {
installAllMemoryEditors(ev.getTrace());
}
else if (event instanceof TraceActivatedPluginEvent evt) {
coordinatesActivated(evt.getActiveCoordinates(), evt.getCause());
else if (event instanceof TraceActivatedPluginEvent ev) {
coordinatesActivated(ev.getActiveCoordinates(), ev.getCause());
}
else if (event instanceof TraceClosedPluginEvent evt) {
uninstallAllMemoryEditors(evt.getTrace());
else if (event instanceof TraceClosedPluginEvent ev) {
uninstallAllMemoryEditors(ev.getTrace());
}
}
@ -83,6 +83,24 @@ public class DebuggerStaticMappingServicePlugin extends Plugin
this.mapping = mapping;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof MappingEntry that)) {
return false;
}
// Yes, use identity, since it should be the same trace db records
if (this.mapping != that.mapping) {
return false;
}
if (this.program != that.program) {
return false;
}
if (!Objects.equals(this.staticRange, that.staticRange)) {
return false;
}
return true;
}
public Trace getTrace() {
return mapping.getTrace();
}

@ -232,11 +250,14 @@ public class DebuggerStaticMappingServicePlugin extends Plugin
private void objectRestored() {
synchronized (lock) {
doAffectedByTraceClosed(trace);
var old = Map.copyOf(outbound);
outbound.clear();
loadOutboundEntries(); // Also places/updates corresponding inbound entries
// TODO: What about removed corresponding inbound entries?
doAffectedByTraceOpened(trace);
if (!old.equals(outbound)) {
// TODO: What about removed corresponding inbound entries?
doAffectedByTraceClosed(trace);
doAffectedByTraceOpened(trace);
}
}
}

@ -811,13 +832,13 @@ public class DebuggerStaticMappingServicePlugin extends Plugin
}
protected <T> T noTraceInfo() {
Msg.warn(this, "The given trace is not open in this tool " +
Msg.debug(this, "The given trace is not open in this tool " +
"(or the service hasn't received and processed the open-trace event, yet)");
return null;
}
protected <T> T noProgramInfo() {
Msg.warn(this, "The given program is not open in this tool " +
Msg.debug(this, "The given program is not open in this tool " +
"(or the service hasn't received and processed the open-program event, yet)");
return null;
}
@ -269,7 +269,7 @@ public class DebuggerRegionsProviderTest extends AbstractGhidraHeadedDebuggerTes
waitForPass(() -> assertTableSize(0));
}
@Test
// @Test // Not gonna with write-behind cache
public void testUndoRedo() throws Exception {
createAndOpenTrace();

@ -304,7 +304,7 @@ public class DebuggerRegionsProviderTest extends AbstractGhidraHeadedDebuggerTes
});
}
@Test
// @Test // Not gonna with write-behind cache
public void testAbort() throws Exception {
createAndOpenTrace();
traceManager.activateTrace(tb.trace);

@ -1128,7 +1128,8 @@ public class DebuggerModelProviderTest extends AbstractGhidraHeadedDebuggerTest
}
waitForTasks();
assertEquals(4, modelProvider.attributesTablePanel.tableModel.getModelData().size());
waitForPass(() -> assertEquals(4,
modelProvider.attributesTablePanel.tableModel.getModelData().size()));
}
@Test

@ -1158,6 +1159,7 @@ public class DebuggerModelProviderTest extends AbstractGhidraHeadedDebuggerTest
// TODO: Should I collapse entries that are links to the same object?
// Would use the "Life" column to display span for each included entry.
// Neat, but not sure it's worth it
assertEquals(14, modelProvider.elementsTablePanel.tableModel.getModelData().size());
waitForPass(() -> assertEquals(14,
modelProvider.elementsTablePanel.tableModel.getModelData().size()));
}
}

@ -401,7 +401,7 @@ public class DebuggerModulesProviderTest extends AbstractGhidraHeadedDebuggerTes
});
}
@Test
// @Test // Not gonna with write-behind cache
public void testUndoRedoCausesUpdateInProvider() throws Exception {
createAndOpenTrace();

@ -444,7 +444,7 @@ public class DebuggerThreadsProviderTest extends AbstractGhidraHeadedDebuggerTes
thread1.getObject().getAttribute(0, TraceObjectThread.KEY_COMMENT).getValue()));
}
@Test
// @Test // Not gonna with write-behind cache
public void testUndoRedoCausesUpdateInProvider() throws Exception {
createAndOpenTrace();
addThreads();
@ -15,7 +15,8 @@
*/
package ghidra.app.plugin.core.debug.gui.time;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Calendar;
import java.util.List;

@ -286,35 +287,7 @@ public class DebuggerTimeProviderTest extends AbstractGhidraHeadedDebuggerTest {
tb.trace.getTimeManager().getSnapshot(0, false).getDescription());
}
@Test
public void testActivateSnapSelectsRow() throws Exception {
createSnaplessTrace();
traceManager.openTrace(tb.trace);
addSnapshots();
waitForDomainObject(tb.trace);
assertProviderEmpty();
traceManager.activateTrace(tb.trace);
waitForSwing();
List<SnapshotRow> data = timeProvider.mainPanel.snapshotTableModel.getModelData();
traceManager.activateSnap(0);
waitForSwing();
assertEquals(data.get(0), timeProvider.mainPanel.snapshotFilterPanel.getSelectedItem());
traceManager.activateSnap(10);
waitForSwing();
assertEquals(data.get(1), timeProvider.mainPanel.snapshotFilterPanel.getSelectedItem());
traceManager.activateSnap(5);
waitForSwing();
assertNull(timeProvider.mainPanel.snapshotFilterPanel.getSelectedItem());
}
// TODO: Test activation bolds the row
@Test
public void testDoubleClickRowActivatesSnap() throws Exception {
@ -47,6 +47,7 @@ import ghidra.trace.model.target.*;
import ghidra.trace.model.thread.*;
import ghidra.trace.model.time.TraceSnapshot;
import ghidra.trace.model.time.TraceTimeManager;
import ghidra.util.StreamUtils;
import ghidra.util.task.TaskMonitor;
public class ObjectBasedTraceRecorderTest extends AbstractGhidraHeadedDebuggerTest {

@ -88,7 +89,7 @@ public class ObjectBasedTraceRecorderTest extends AbstractGhidraHeadedDebuggerTe
protected void dumpObjects() {
System.err.println("All objects:");
for (TraceObject object : objects.getAllObjects()) {
for (TraceObject object : StreamUtils.iter(objects.getAllObjects())) {
System.err.println(" " + object);
}
}

@ -99,7 +100,7 @@ public class ObjectBasedTraceRecorderTest extends AbstractGhidraHeadedDebuggerTe
waitForPass(noExc(() -> {
waitOn(recorder.flushTransactions());
assertEquals(5, objects.getAllObjects().size());
assertEquals(5, objects.getObjectCount());
}));
}
@ -32,6 +32,8 @@ import java.util.function.Consumer;
* debouncer configured with a time window that contains all the events, only the final event in the
* cluster will be processed. The cost of doing this is a waiting period, so event processing may be
* less responsive, but will also be less frantic.
*
* @param <T> the value type
*/
public class AsyncDebouncer<T> {
protected final AsyncTimer timer;

@ -99,7 +101,7 @@ public class AsyncDebouncer<T> {
* This sets or resets the timer for the event window. The settled event will fire with the
* given value after this waiting period, unless another contact event occurs first.
*
* @param val
* @param val the new value
*/
public synchronized void contact(T val) {
lastContact = val;

@ -141,4 +143,16 @@ public class AsyncDebouncer<T> {
}
return settled();
}
public static class Bypass<T> extends AsyncDebouncer<T> {
public Bypass() {
super(null, 0);
}
@Override
public synchronized void contact(T val) {
lastContact = val;
doSettled();
}
}
}
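Editor's note: the new AsyncDebouncer.Bypass above keeps the debouncer interface but settles synchronously on every contact, removing the timer-window latency. A hedged usage sketch; it assumes the settled() accessor invoked in the hunk above is publicly visible:

	import ghidra.async.AsyncDebouncer;

	// A debouncer that fires immediately: same interface, no added delay.
	AsyncDebouncer<String> debouncer = new AsyncDebouncer.Bypass<>();
	debouncer.settled().thenAccept(v -> System.out.println("settled with: " + v));
	debouncer.contact("event"); // settles synchronously, no timer window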
@ -15,6 +15,7 @@
*/
package ghidra.trace.database;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.function.Consumer;

@ -149,6 +150,8 @@ public class DBTrace extends DBCachedDomainObjectAdapter implements Trace, Trace
protected ListenerSet<TraceProgramViewListener> viewListeners =
new ListenerSet<>(TraceProgramViewListener.class, true);
private volatile boolean closing;
public DBTrace(String name, CompilerSpec baseCompilerSpec, Object consumer)
throws IOException, LanguageNotFoundException {
super(new DBHandle(), DBOpenMode.CREATE, TaskMonitor.DUMMY, name, DB_TIME_INTERVAL,

@ -608,7 +611,7 @@ public class DBTrace extends DBCachedDomainObjectAdapter implements Trace, Trace
DBTraceProgramView view;
try (LockHold hold = lockRead()) {
view = fixedProgramViews.computeIfAbsent(snap, s -> {
Msg.debug(this, "Creating fixed view at snap=" + snap);
Msg.trace(this, "Creating fixed view at snap=" + snap);
return new DBTraceProgramView(this, snap, baseCompilerSpec);
});
}

@ -871,4 +874,29 @@ public class DBTrace extends DBCachedDomainObjectAdapter implements Trace, Trace
public void updateViewportsSnapshotDeleted(TraceSnapshot snapshot) {
allViewports(v -> v.updateSnapshotDeleted(snapshot));
}
@Override
public void save(String comment, TaskMonitor monitor) throws IOException, CancelledException {
objectManager.flushWbCaches();
super.save(comment, monitor);
}
@Override
public void saveToPackedFile(File outputFile, TaskMonitor monitor)
throws IOException, CancelledException {
objectManager.flushWbCaches();
super.saveToPackedFile(outputFile, monitor);
}
public boolean isClosing() {
return closing;
}
@Override
protected void close() {
closing = true;
objectManager.flushWbCaches();
super.close();
objectManager.waitWbWorkers();
}
}
@ -49,6 +49,21 @@ public abstract class AbstractDBTraceProgramViewMemory
protected LiveMemoryHandler memoryWriteRedirect;
private static final int CACHE_PAGE_COUNT = 3;
protected final ByteCache cache = new ByteCache(CACHE_PAGE_COUNT) {
@Override
protected int doLoad(Address address, ByteBuffer buf) throws MemoryAccessException {
DBTraceMemorySpace space =
program.trace.getMemoryManager().getMemorySpace(address.getAddressSpace(), false);
if (space == null) {
int len = buf.remaining();
buf.position(buf.limit());
return len;
}
return space.getViewBytes(program.snap, address, buf);
}
};
public AbstractDBTraceProgramViewMemory(DBTraceProgramView program) {
this.program = program;
this.memoryManager = program.trace.getMemoryManager();

@ -301,24 +316,25 @@ public abstract class AbstractDBTraceProgramViewMemory
@Override
public byte getByte(Address addr) throws MemoryAccessException {
MemoryBlock block = getBlock(addr);
if (block == null) {
return 0; // Memory assumed initialized to 0
try (LockHold hold = program.trace.lockRead()) {
return cache.read(addr);
}
return block.getByte(addr);
}
@Override
public int getBytes(Address addr, byte[] dest, int destIndex, int size)
throws MemoryAccessException {
MemoryBlock block = getBlock(addr);
if (block == null) {
int avail = MathUtilities.unsignedMin(Math.max(0, size),
addr.getAddressSpace().getMaxAddress().subtract(addr));
Arrays.fill(dest, destIndex, avail, (byte) 0);
return avail;
public int getBytes(Address addr, byte[] b, int off, int len) throws MemoryAccessException {
try (LockHold hold = program.trace.lockRead()) {
if (cache.canCache(addr, len)) {
return cache.read(addr, ByteBuffer.wrap(b, off, len));
}
AddressSpace as = addr.getAddressSpace();
DBTraceMemorySpace space = program.trace.getMemoryManager().getMemorySpace(as, false);
if (space == null) {
throw new MemoryAccessException("Space does not exist");
}
len = MathUtilities.unsignedMin(len, as.getMaxAddress().subtract(addr) + 1);
return space.getViewBytes(program.snap, addr, ByteBuffer.wrap(b, off, len));
}
return block.getBytes(addr, dest, destIndex, size);
}
@Override
@ -26,7 +26,6 @@ import ghidra.program.model.address.*;
import ghidra.program.model.mem.*;
import ghidra.trace.database.memory.DBTraceMemorySpace;
import ghidra.trace.model.memory.TraceMemorySpaceInputStream;
import ghidra.util.LockHold;
import ghidra.util.MathUtilities;
public abstract class AbstractDBTraceProgramViewMemoryBlock implements MemoryBlock {

@ -97,19 +96,6 @@ public abstract class AbstractDBTraceProgramViewMemoryBlock implements MemoryBlo
private final List<MemoryBlockSourceInfo> info =
Collections.singletonList(new MyMemoryBlockSourceInfo());
private static final int CACHE_PAGE_COUNT = 3;
private final ByteCache cache = new ByteCache(CACHE_PAGE_COUNT) {
@Override
protected int doLoad(Address address, ByteBuffer buf) throws MemoryAccessException {
DBTraceMemorySpace space =
program.trace.getMemoryManager().getMemorySpace(getAddressSpace(), false);
if (space == null) {
throw new MemoryAccessException("Space does not exist");
}
return space.getViewBytes(program.snap, address, buf);
}
};
protected AbstractDBTraceProgramViewMemoryBlock(DBTraceProgramView program) {
this.program = program;
}

@ -120,15 +106,6 @@ public abstract class AbstractDBTraceProgramViewMemoryBlock implements MemoryBlo
return getStart().getAddressSpace();
}
/**
* Should be called when the snap changes or when bytes change
*/
protected void invalidateBytesCache(AddressRange range) {
if (range == null || range.intersects(getAddressRange())) {
cache.invalidate(range);
}
}
protected DBTraceMemorySpace getMemorySpace() {
return program.trace.getMemoryManager().getMemorySpace(getAddressSpace(), false);
}

@ -191,13 +168,11 @@ public abstract class AbstractDBTraceProgramViewMemoryBlock implements MemoryBlo
@Override
public byte getByte(Address addr) throws MemoryAccessException {
try (LockHold hold = program.trace.lockRead()) {
AddressRange range = getAddressRange();
if (!range.contains(addr)) {
throw new MemoryAccessException();
}
return cache.read(addr);
AddressRange range = getAddressRange();
if (!range.contains(addr)) {
throw new MemoryAccessException();
}
return program.memory.getByte(addr);
}
@Override

@ -207,22 +182,12 @@ public abstract class AbstractDBTraceProgramViewMemoryBlock implements MemoryBlo
@Override
public int getBytes(Address addr, byte[] b, int off, int len) throws MemoryAccessException {
try (LockHold hold = program.trace.lockRead()) {
AddressRange range = getAddressRange();
if (!range.contains(addr)) {
throw new MemoryAccessException();
}
if (cache.canCache(addr, len)) {
return cache.read(addr, ByteBuffer.wrap(b, off, len));
}
DBTraceMemorySpace space =
program.trace.getMemoryManager().getMemorySpace(range.getAddressSpace(), false);
if (space == null) {
throw new MemoryAccessException("Space does not exist");
}
len = MathUtilities.unsignedMin(len, range.getMaxAddress().subtract(addr) + 1);
return space.getViewBytes(program.snap, addr, ByteBuffer.wrap(b, off, len));
AddressRange range = getAddressRange();
if (!range.contains(addr)) {
throw new MemoryAccessException();
}
len = MathUtilities.unsignedMin(len, range.getMaxAddress().subtract(addr) + 1);
return program.memory.getBytes(addr, b, off, len);
}
@Override

@ -267,7 +232,6 @@ public abstract class AbstractDBTraceProgramViewMemoryBlock implements MemoryBlo
@Override
public boolean isOverlay() {
// TODO: What effect does this have? Does it makes sense for trace "overlays"?
return getAddressSpace().isOverlaySpace();
}
@ -20,6 +20,7 @@ import java.util.function.Consumer;
import java.util.function.Function;
import ghidra.program.model.address.*;
import ghidra.program.model.mem.MemoryAccessException;
import ghidra.program.model.mem.MemoryBlock;
import ghidra.trace.model.memory.TraceMemoryRegion;
import ghidra.util.LockHold;

@ -202,10 +203,6 @@ public class DBTraceProgramViewMemory extends AbstractDBTraceProgramViewMemory {
if (regionBlocks == null) { // <init> order
return;
}
for (AbstractDBTraceProgramViewMemoryBlock block : forceFullView
? spaceBlocks.values()
: regionBlocks.values()) {
block.invalidateBytesCache(range);
}
cache.invalidate(range);
}
}
@ -0,0 +1,217 @@
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.trace.database.target;
import java.util.*;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import ghidra.trace.model.Lifespan;
public class CachePerDBTraceObject {
private record SnapKey(long snap, String key) implements Comparable<SnapKey> {
@Override
public int compareTo(SnapKey that) {
int c = Long.compare(this.snap, that.snap);
if (c != 0) {
return c;
}
if (this.key == that.key) {
return 0;
}
if (this.key == null) {
return 1;
}
if (that.key == null) {
return -1;
}
return this.key.compareTo(that.key);
}
public static SnapKey forValue(DBTraceObjectValue value) {
return new SnapKey(value.getMinSnap(), value.getEntryKey());
}
}
public record Cached<T>(boolean isMiss, T value) {
static final Cached<?> MISS = new Cached<>(true, null);
@SuppressWarnings("unchecked")
public static <T> Cached<T> miss() {
return (Cached<T>) MISS;
}
static <T> Cached<T> hit(T value) {
return new Cached<>(false, value);
}
}
private static final int MAX_CACHE_KEYS = 200;
private static final int MAX_VALUES_PER_KEY = 20;
private static final int MAX_VALUES_ANY_KEY = 4000;
private static final int EXPANSION = 10;
private record CachedLifespanValues<K>(Lifespan span,
NavigableMap<K, DBTraceObjectValue> values) {
}
private final Map<String, CachedLifespanValues<Long>> perKeyCache = new LinkedHashMap<>() {
protected boolean removeEldestEntry(Map.Entry<String, CachedLifespanValues<Long>> eldest) {
return size() > MAX_CACHE_KEYS;
}
};
private CachedLifespanValues<SnapKey> anyKeyCache = null;
private Stream<DBTraceObjectValue> doStreamAnyKey(NavigableMap<SnapKey, DBTraceObjectValue> map,
Lifespan lifespan) {
// TODO: Can be a HashMap, if that's faster
return map.values().stream().filter(v -> lifespan.intersects(v.getLifespan()));
}
private Stream<DBTraceObjectValue> doStreamPerKey(NavigableMap<Long, DBTraceObjectValue> map,
Lifespan lifespan, boolean forward) {
Long min = lifespan.min();
var floor = map.floorEntry(min);
if (floor != null && floor.getValue().getLifespan().contains(min)) {
min = floor.getKey();
}
NavigableMap<Long, DBTraceObjectValue> sub = map.subMap(min, true, lifespan.max(), true);
if (forward) {
return sub.values().stream();
}
return sub.descendingMap().values().stream();
}
private DBTraceObjectValue doGetValue(NavigableMap<Long, DBTraceObjectValue> map, long snap) {
Entry<Long, DBTraceObjectValue> floor = map.floorEntry(snap);
if (floor == null) {
return null;
}
DBTraceObjectValue value = floor.getValue();
if (!value.getLifespan().contains(snap)) {
return null;
}
return value;
}
public Cached<Stream<DBTraceObjectValue>> streamValues(Lifespan lifespan) {
if (anyKeyCache == null) {
return Cached.miss();
}
if (!anyKeyCache.span.encloses(lifespan)) {
return Cached.miss();
}
return Cached.hit(doStreamAnyKey(anyKeyCache.values, lifespan));
}
public Cached<Stream<DBTraceObjectValue>> streamValues(Lifespan lifespan, String key,
boolean forward) {
CachedLifespanValues<Long> cached = perKeyCache.get(key);
if (cached == null) {
return Cached.miss();
}
if (!cached.span.encloses(lifespan)) {
return Cached.miss();
}
return Cached.hit(doStreamPerKey(cached.values, lifespan, forward));
}
public Cached<DBTraceObjectValue> getValue(long snap, String key) {
CachedLifespanValues<Long> cached = perKeyCache.get(key);
if (cached == null) {
return Cached.miss();
}
if (!cached.span.contains(snap)) {
return Cached.miss();
}
return Cached.hit(doGetValue(cached.values, snap));
}
public Lifespan expandLifespan(Lifespan lifespan) {
// Expand the query to take advantage of spatial locality (in the time dimension)
long min = lifespan.lmin() - EXPANSION;
if (min > lifespan.lmin()) {
min = Lifespan.ALL.lmin();
}
long max = lifespan.lmax() + EXPANSION;
if (max < lifespan.lmax()) {
max = Lifespan.ALL.lmax();
}
return Lifespan.span(min, max);
}
private DBTraceObjectValue mergeValues(DBTraceObjectValue v1, DBTraceObjectValue v2) {
throw new IllegalStateException("Conflicting values: %s, %s".formatted(v1, v2));
}
private NavigableMap<SnapKey, DBTraceObjectValue> collectAnyKey(
Stream<DBTraceObjectValue> values) {
return values.collect(
Collectors.toMap(SnapKey::forValue, v -> v, this::mergeValues, TreeMap::new));
}
private NavigableMap<Long, DBTraceObjectValue> collectPerKey(
|
||||
Stream<DBTraceObjectValue> values) {
|
||||
return values.collect(
|
||||
Collectors.toMap(v -> v.getLifespan().min(), v -> v, this::mergeValues, TreeMap::new));
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValue> offerStreamAnyKey(Lifespan expanded,
|
||||
Stream<DBTraceObjectValue> values, Lifespan lifespan) {
|
||||
NavigableMap<SnapKey, DBTraceObjectValue> map = collectAnyKey(values);
|
||||
anyKeyCache = new CachedLifespanValues<>(expanded, map);
|
||||
return doStreamAnyKey(map, lifespan);
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValue> offerStreamPerKey(Lifespan expanded,
|
||||
Stream<DBTraceObjectValue> values, Lifespan lifespan, String key, boolean forward) {
|
||||
NavigableMap<Long, DBTraceObjectValue> map = collectPerKey(values);
|
||||
perKeyCache.put(key, new CachedLifespanValues<>(expanded, map));
|
||||
return doStreamPerKey(map, lifespan, forward);
|
||||
}
|
||||
|
||||
public DBTraceObjectValue offerGetValue(Lifespan expanded, Stream<DBTraceObjectValue> values,
|
||||
long snap, String key) {
|
||||
NavigableMap<Long, DBTraceObjectValue> map = collectPerKey(values);
|
||||
perKeyCache.put(key, new CachedLifespanValues<>(expanded, map));
|
||||
return doGetValue(map, snap);
|
||||
}
|
||||
|
||||
public void notifyValueCreated(DBTraceObjectValue value) {
|
||||
Objects.requireNonNull(value);
|
||||
if (anyKeyCache != null && anyKeyCache.span.intersects(value.getLifespan())) {
|
||||
anyKeyCache.values.put(SnapKey.forValue(value), value);
|
||||
}
|
||||
CachedLifespanValues<Long> cached = perKeyCache.get(value.getEntryKey());
|
||||
if (cached != null && cached.span.intersects(value.getLifespan())) {
|
||||
cached.values.put(value.getLifespan().min(), value);
|
||||
}
|
||||
}
|
||||
|
||||
public void notifyValueDeleted(DBTraceObjectValue value) {
|
||||
Objects.requireNonNull(value);
|
||||
if (anyKeyCache != null) {
|
||||
anyKeyCache.values.remove(SnapKey.forValue(value));
|
||||
}
|
||||
CachedLifespanValues<Long> cached = perKeyCache.get(value.getEntryKey());
|
||||
if (cached != null) {
|
||||
cached.values.remove(value.getLifespan().min());
|
||||
}
|
||||
}
|
||||
}
|
@ -35,7 +35,8 @@ import ghidra.trace.database.memory.DBTraceObjectRegister;
|
||||
import ghidra.trace.database.module.*;
|
||||
import ghidra.trace.database.stack.DBTraceObjectStack;
|
||||
import ghidra.trace.database.stack.DBTraceObjectStackFrame;
|
||||
import ghidra.trace.database.target.InternalTraceObjectValue.ValueLifespanSetter;
|
||||
import ghidra.trace.database.target.CachePerDBTraceObject.Cached;
|
||||
import ghidra.trace.database.target.DBTraceObjectValue.ValueLifespanSetter;
|
||||
import ghidra.trace.database.target.ValueSpace.EntryKeyDimension;
|
||||
import ghidra.trace.database.target.ValueSpace.SnapDimension;
|
||||
import ghidra.trace.database.target.visitors.*;
|
||||
@ -55,8 +56,7 @@ import ghidra.trace.model.target.annot.TraceObjectInterfaceUtils;
|
||||
import ghidra.trace.model.thread.TraceObjectThread;
|
||||
import ghidra.trace.util.TraceChangeRecord;
|
||||
import ghidra.trace.util.TraceEvents;
|
||||
import ghidra.util.LockHold;
|
||||
import ghidra.util.Msg;
|
||||
import ghidra.util.*;
|
||||
import ghidra.util.database.*;
|
||||
import ghidra.util.database.DBCachedObjectStoreFactory.AbstractDBFieldCodec;
|
||||
import ghidra.util.database.annot.*;
|
||||
@ -65,8 +65,6 @@ import ghidra.util.database.annot.*;
|
||||
public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
protected static final String TABLE_NAME = "Objects";
|
||||
|
||||
private static final int VALUE_CACHE_SIZE = 50;
|
||||
|
||||
protected static <T extends TraceObjectInterface> //
|
||||
Map.Entry<Class<? extends T>, Function<DBTraceObject, ? extends T>> safeEntry(
|
||||
Class<T> cls, Function<DBTraceObject, ? extends T> ctor) {
|
||||
@ -120,9 +118,6 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
}
|
||||
|
||||
record CachedLifespanValues(Lifespan span, Set<InternalTraceObjectValue> values) {
|
||||
}
|
||||
|
||||
// Canonical path
|
||||
static final String PATH_COLUMN_NAME = "Path";
|
||||
|
||||
@ -140,17 +135,7 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
private TargetObjectSchema targetSchema;
|
||||
private Map<Class<? extends TraceObjectInterface>, TraceObjectInterface> ifaces;
|
||||
|
||||
private final Map<String, InternalTraceObjectValue> valueCache = new LinkedHashMap<>() {
|
||||
protected boolean removeEldestEntry(Map.Entry<String, InternalTraceObjectValue> eldest) {
|
||||
return size() > VALUE_CACHE_SIZE;
|
||||
}
|
||||
};
|
||||
private final Map<String, Long> nullCache = new LinkedHashMap<>() {
|
||||
protected boolean removeEldestEntry(Map.Entry<String, Long> eldest) {
|
||||
return size() > VALUE_CACHE_SIZE;
|
||||
}
|
||||
};
|
||||
private CachedLifespanValues cachedLifespanValues = null;
|
||||
private final CachePerDBTraceObject cache = new CachePerDBTraceObject();
|
||||
private volatile MutableLifeSet cachedLife = null;
|
||||
|
||||
public DBTraceObject(DBTraceObjectManager manager, DBCachedObjectStore<?> store,
|
||||
@ -245,7 +230,7 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
return DBTraceObjectValPath.of();
|
||||
}
|
||||
DBTraceObject parent = doCreateCanonicalParentObject();
|
||||
InternalTraceObjectValue value = parent.setValue(lifespan, path.key(), this, resolution);
|
||||
DBTraceObjectValue value = parent.setValue(lifespan, path.key(), this, resolution);
|
||||
// TODO: Should I re-order the recursion, so values are inserted from root to this?
|
||||
// TODO: Should child lifespans be allowed to exceed the parent's?
|
||||
DBTraceObjectValPath path = parent.doInsert(lifespan, resolution);
|
||||
@ -276,10 +261,10 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
|
||||
protected void doRemoveTree(Lifespan span) {
|
||||
for (InternalTraceObjectValue parent : getParents(span)) {
|
||||
for (DBTraceObjectValue parent : getParents(span)) {
|
||||
parent.doTruncateOrDeleteAndEmitLifeChange(span);
|
||||
}
|
||||
for (InternalTraceObjectValue value : getValues(span)) {
|
||||
for (DBTraceObjectValue value : getValues(span)) {
|
||||
value.doTruncateOrDeleteAndEmitLifeChange(span);
|
||||
if (value.isCanonical()) {
|
||||
value.getChild().doRemoveTree(span);
|
||||
@ -294,28 +279,39 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValueData> streamCanonicalParentsData(Lifespan lifespan) {
|
||||
return manager.valueMap.reduce(TraceObjectValueQuery.canonicalParents(this, lifespan))
|
||||
.values()
|
||||
.stream();
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValueBehind> streamCanonicalParentsBehind(Lifespan lifespan) {
|
||||
return manager.valueWbCache.streamCanonicalParents(this, lifespan);
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValue> streamCanonicalParents(Lifespan lifespan) {
|
||||
return Stream.concat(
|
||||
streamCanonicalParentsData(lifespan).map(v -> v.getWrapper()),
|
||||
streamCanonicalParentsBehind(lifespan).map(v -> v.getWrapper()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public TraceObjectValue getCanonicalParent(long snap) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
if (isRoot()) {
|
||||
return manager.getRootValue();
|
||||
}
|
||||
return manager.valueMap
|
||||
.reduce(TraceObjectValueQuery.canonicalParents(this, Lifespan.at(snap)))
|
||||
.firstValue();
|
||||
return streamCanonicalParents(Lifespan.at(snap)).findAny().orElse(null);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<? extends InternalTraceObjectValue> getCanonicalParents(Lifespan lifespan) {
|
||||
public Stream<DBTraceObjectValue> getCanonicalParents(Lifespan lifespan) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
if (isRoot()) {
|
||||
return Stream.of(manager.getRootValue());
|
||||
}
|
||||
List<InternalTraceObjectValue> list = List.copyOf(
|
||||
manager.valueMap.reduce(TraceObjectValueQuery.canonicalParents(this, lifespan))
|
||||
.values());
|
||||
return list.stream();
|
||||
return streamCanonicalParents(lifespan).toList().stream();
|
||||
}
|
||||
}
|
||||
|
||||
@ -350,56 +346,63 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
return ifCls.cast(ifaces.get(ifCls));
|
||||
}
|
||||
|
||||
protected Collection<? extends InternalTraceObjectValue> doGetParents(Lifespan lifespan) {
|
||||
return List.copyOf(
|
||||
manager.valueMap.reduce(TraceObjectValueQuery.parents(this, lifespan)).values());
|
||||
protected Stream<DBTraceObjectValueData> streamParentsData(Lifespan lifespan) {
|
||||
return manager.valueMap.reduce(TraceObjectValueQuery.parents(this, lifespan))
|
||||
.values()
|
||||
.stream();
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValueBehind> streamParentsBehind(Lifespan lifespan) {
|
||||
return manager.valueWbCache.streamParents(this, lifespan);
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValue> streamParents(Lifespan lifespan) {
|
||||
return Stream.concat(
|
||||
streamParentsData(lifespan).map(v -> v.getWrapper()),
|
||||
streamParentsBehind(lifespan).map(v -> v.getWrapper()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<? extends InternalTraceObjectValue> getParents(Lifespan lifespan) {
|
||||
public Collection<DBTraceObjectValue> getParents(Lifespan lifespan) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return doGetParents(lifespan);
|
||||
return streamParents(lifespan).toList();
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean doHasAnyValues() {
|
||||
return !manager.valueMap.reduce(TraceObjectValueQuery.values(this, Lifespan.ALL))
|
||||
.isEmpty();
|
||||
return streamValuesW(Lifespan.ALL).findAny().isPresent();
|
||||
}
|
||||
|
||||
protected Collection<? extends InternalTraceObjectValue> doGetValues(Lifespan lifespan) {
|
||||
protected Stream<DBTraceObjectValueData> streamValuesData(Lifespan lifespan) {
|
||||
return manager.valueMap
|
||||
.reduce(TraceObjectValueQuery.values(this, lifespan)
|
||||
.starting(EntryKeyDimension.FORWARD))
|
||||
.values();
|
||||
.values()
|
||||
.stream();
|
||||
}
|
||||
|
||||
protected Collection<? extends InternalTraceObjectValue> cachedDoGetValues(Lifespan lifespan) {
|
||||
if (Long.compareUnsigned(lifespan.lmax() - lifespan.lmin(), 10) > 0) {
|
||||
return List.copyOf(doGetValues(lifespan));
|
||||
protected Stream<DBTraceObjectValueBehind> streamValuesBehind(Lifespan lifespan) {
|
||||
return manager.valueWbCache.streamValues(this, lifespan);
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValue> streamValuesW(Lifespan lifespan) {
|
||||
return Stream.concat(
|
||||
streamValuesData(lifespan).map(d -> d.getWrapper()),
|
||||
streamValuesBehind(lifespan).map(b -> b.getWrapper()));
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValue> streamValuesR(Lifespan lifespan) {
|
||||
Cached<Stream<DBTraceObjectValue>> cached = cache.streamValues(lifespan);
|
||||
if (!cached.isMiss()) {
|
||||
return cached.value();
|
||||
}
|
||||
if (cachedLifespanValues == null || !cachedLifespanValues.span.encloses(lifespan)) {
|
||||
// Expand the query to take advantage of spatial locality (in the time dimension)
|
||||
long min = lifespan.lmin() - 10;
|
||||
if (min > lifespan.lmin()) {
|
||||
min = Lifespan.ALL.lmin();
|
||||
}
|
||||
long max = lifespan.lmax() + 10;
|
||||
if (max < lifespan.lmax()) {
|
||||
max = Lifespan.ALL.lmax();
|
||||
}
|
||||
Lifespan expanded = Lifespan.span(min, max);
|
||||
cachedLifespanValues =
|
||||
new CachedLifespanValues(expanded, new HashSet<>(doGetValues(expanded)));
|
||||
}
|
||||
return cachedLifespanValues.values.stream()
|
||||
.filter(v -> v.getLifespan().intersects(lifespan))
|
||||
.toList();
|
||||
Lifespan expanded = cache.expandLifespan(lifespan);
|
||||
Stream<DBTraceObjectValue> stream = streamValuesW(expanded);
|
||||
return cache.offerStreamAnyKey(expanded, stream, lifespan);
|
||||
}
|
||||
|
||||
protected boolean doHasAnyParents() {
|
||||
return !manager.valueMap.reduce(TraceObjectValueQuery.parents(this, Lifespan.ALL))
|
||||
.isEmpty();
|
||||
return streamParents(Lifespan.ALL).findAny().isPresent();
|
||||
}
|
||||
|
||||
protected boolean doIsConnected() {
|
||||
@ -407,28 +410,32 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<? extends InternalTraceObjectValue> getValues(Lifespan lifespan) {
|
||||
public Collection<DBTraceObjectValue> getValues(Lifespan lifespan) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return cachedDoGetValues(lifespan);
|
||||
return streamValuesR(lifespan).toList();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<? extends InternalTraceObjectValue> getElements(Lifespan lifespan) {
|
||||
return getValues(lifespan).stream()
|
||||
.filter(v -> PathUtils.isIndex(v.getEntryKey()))
|
||||
.toList();
|
||||
public Collection<DBTraceObjectValue> getElements(Lifespan lifespan) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return streamValuesR(lifespan)
|
||||
.filter(v -> PathUtils.isIndex(v.getEntryKey()))
|
||||
.toList();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<? extends InternalTraceObjectValue> getAttributes(Lifespan lifespan) {
|
||||
return getValues(lifespan).stream()
|
||||
.filter(v -> PathUtils.isName(v.getEntryKey()))
|
||||
.toList();
|
||||
public Collection<DBTraceObjectValue> getAttributes(Lifespan lifespan) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return streamValuesR(lifespan)
|
||||
.filter(v -> PathUtils.isName(v.getEntryKey()))
|
||||
.toList();
|
||||
}
|
||||
}
|
||||
|
||||
protected void doCheckConflicts(Lifespan span, String key, Object value) {
|
||||
for (InternalTraceObjectValue val : doGetValues(span, key, true)) {
|
||||
for (DBTraceObjectValue val : StreamUtils.iter(streamValuesR(span, key, true))) {
|
||||
if (!Objects.equals(value, val.getValue())) {
|
||||
throw new DuplicateKeyException(key);
|
||||
}
|
||||
@ -438,7 +445,7 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
protected Lifespan doAdjust(Lifespan span, String key, Object value) {
|
||||
// Ordered by min, so I only need to consider the first conflict
|
||||
// If start is contained in an entry, assume the user means to overwrite it.
|
||||
for (InternalTraceObjectValue val : doGetValues(span, key, true)) {
|
||||
for (DBTraceObjectValue val : StreamUtils.iter(streamValuesR(span, key, true))) {
|
||||
if (Objects.equals(value, val.getValue())) {
|
||||
continue; // not a conflict
|
||||
}
|
||||
@ -451,63 +458,94 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
return span;
|
||||
}
|
||||
|
||||
protected Collection<? extends InternalTraceObjectValue> doGetValues(Lifespan span,
|
||||
String key, boolean forward) {
|
||||
protected Stream<DBTraceObjectValueData> streamValuesData(Lifespan span, String key,
|
||||
boolean forward) {
|
||||
return manager.valueMap
|
||||
.reduce(TraceObjectValueQuery.values(this, key, key, span)
|
||||
.starting(forward ? SnapDimension.FORWARD : SnapDimension.BACKWARD))
|
||||
.orderedValues();
|
||||
.orderedValues()
|
||||
.stream();
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValueBehind> streamValuesBehind(Lifespan span, String key,
|
||||
boolean forward) {
|
||||
return manager.valueWbCache.streamValues(this, key, span, forward);
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValue> streamValuesW(Lifespan span, String key, boolean forward) {
|
||||
return StreamUtils.merge(List.of(
|
||||
streamValuesData(span, key, forward).map(d -> d.getWrapper()),
|
||||
streamValuesBehind(span, key, forward).map(b -> b.getWrapper())),
|
||||
Comparator.comparing(forward ? v -> v.getMinSnap() : v -> -v.getMaxSnap()));
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValue> streamValuesR(Lifespan span, String key, boolean forward) {
|
||||
Cached<Stream<DBTraceObjectValue>> cached = cache.streamValues(span, key, forward);
|
||||
if (!cached.isMiss()) {
|
||||
return cached.value();
|
||||
}
|
||||
Lifespan expanded = cache.expandLifespan(span);
|
||||
Stream<DBTraceObjectValue> stream = streamValuesW(expanded, key, forward);
|
||||
return cache.offerStreamPerKey(expanded, stream, span, key, forward);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<? extends InternalTraceObjectValue> getValues(Lifespan span, String key) {
|
||||
public Collection<? extends DBTraceObjectValue> getValues(Lifespan span, String key) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
String k = getTargetSchema().checkAliasedAttribute(key);
|
||||
return doGetValues(span, k, true);
|
||||
return streamValuesR(span, k, true).toList();
|
||||
}
|
||||
}
|
||||
|
||||
protected DBTraceObjectValue getValueW(long snap, String key) {
|
||||
DBTraceObjectValueBehind behind = manager.valueWbCache.get(this, key, snap);
|
||||
if (behind != null) {
|
||||
return behind.getWrapper();
|
||||
}
|
||||
DBTraceObjectValueData data = manager.valueMap
|
||||
.reduce(TraceObjectValueQuery.values(this, key, key, Lifespan.at(snap)))
|
||||
.firstValue();
|
||||
if (data != null) {
|
||||
return data.getWrapper();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
protected DBTraceObjectValue getValueR(long snap, String key) {
|
||||
Cached<DBTraceObjectValue> cached = cache.getValue(snap, key);
|
||||
if (!cached.isMiss()) {
|
||||
return cached.value();
|
||||
}
|
||||
Lifespan expanded = cache.expandLifespan(Lifespan.at(snap));
|
||||
Stream<DBTraceObjectValue> stream = streamValuesW(expanded, key, true);
|
||||
return cache.offerGetValue(expanded, stream, snap, key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObjectValue getValue(long snap, String key) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
String k = getTargetSchema().checkAliasedAttribute(key);
|
||||
return getValueR(snap, k);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalTraceObjectValue getValue(long snap, String key) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
String k = getTargetSchema().checkAliasedAttribute(key);
|
||||
InternalTraceObjectValue cached = valueCache.get(k);
|
||||
if (cached != null && !cached.isDeleted() && cached.getLifespan().contains(snap)) {
|
||||
return cached;
|
||||
}
|
||||
Long nullSnap = nullCache.get(k);
|
||||
if (nullSnap != null && nullSnap.longValue() == snap) {
|
||||
return null;
|
||||
}
|
||||
InternalTraceObjectValue found = manager.valueMap
|
||||
.reduce(TraceObjectValueQuery.values(this, k, k, Lifespan.at(snap)))
|
||||
.firstValue();
|
||||
if (found == null) {
|
||||
nullCache.put(k, snap);
|
||||
}
|
||||
else {
|
||||
valueCache.put(k, found);
|
||||
}
|
||||
return found;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<? extends InternalTraceObjectValue> getOrderedValues(Lifespan span, String key,
|
||||
public Stream<DBTraceObjectValue> getOrderedValues(Lifespan span, String key,
|
||||
boolean forward) {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
String k = getTargetSchema().checkAliasedAttribute(key);
|
||||
return doGetValues(span, k, forward).stream();
|
||||
// Locking issue if we stream lazily. Capture to list with lock
|
||||
return streamValuesR(span, k, forward).toList().stream();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalTraceObjectValue getElement(long snap, String index) {
|
||||
public DBTraceObjectValue getElement(long snap, String index) {
|
||||
return getValue(snap, PathUtils.makeKey(index));
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalTraceObjectValue getElement(long snap, long index) {
|
||||
public DBTraceObjectValue getElement(long snap, long index) {
|
||||
return getElement(snap, PathUtils.makeIndex(index));
|
||||
}
|
||||
|
||||
@ -521,8 +559,9 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
|
||||
protected Stream<? extends TraceObjectValPath> doStreamVisitor(Lifespan span,
|
||||
Visitor visitor) {
|
||||
// Capturing to list with lock
|
||||
return TreeTraversal.INSTANCE.walkObject(visitor, this, span,
|
||||
DBTraceObjectValPath.of());
|
||||
DBTraceObjectValPath.of()).toList().stream();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -566,7 +605,8 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
DBTraceObjectValPath empty = DBTraceObjectValPath.of();
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
if (relativePath.isRoot()) {
|
||||
return Stream.of(empty); // Not the empty stream
|
||||
// Singleton of empty path (not the empty stream)
|
||||
return Stream.of(empty);
|
||||
}
|
||||
return doStreamVisitor(span,
|
||||
new OrderedSuccessorsVisitor(relativePath, forward));
|
||||
@ -587,17 +627,12 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
}
|
||||
|
||||
protected InternalTraceObjectValue doCreateValue(Lifespan lifespan, String key,
|
||||
Object value) {
|
||||
Long nullSnap = nullCache.get(key);
|
||||
if (nullSnap != null && lifespan.contains(nullSnap)) {
|
||||
nullCache.remove(key);
|
||||
}
|
||||
protected DBTraceObjectValue doCreateValue(Lifespan lifespan, String key, Object value) {
|
||||
return manager.doCreateValue(lifespan, this, key, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalTraceObjectValue setValue(Lifespan lifespan, String key, Object value,
|
||||
public DBTraceObjectValue setValue(Lifespan lifespan, String key, Object value,
|
||||
ConflictResolution resolution) {
|
||||
try (LockHold hold = manager.trace.lockWrite()) {
|
||||
if (isDeleted()) {
|
||||
@ -614,14 +649,13 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
DBTraceObject canonicalLifeChanged = null;
|
||||
|
||||
@Override
|
||||
protected Iterable<InternalTraceObjectValue> getIntersecting(Long lower,
|
||||
protected Iterable<DBTraceObjectValue> getIntersecting(Long lower,
|
||||
Long upper) {
|
||||
return Collections.unmodifiableCollection(
|
||||
doGetValues(Lifespan.span(lower, upper), k, true));
|
||||
return StreamUtils.iter(streamValuesR(Lifespan.span(lower, upper), k, true));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void remove(InternalTraceObjectValue entry) {
|
||||
protected void remove(DBTraceObjectValue entry) {
|
||||
if (entry.isCanonical()) {
|
||||
canonicalLifeChanged = entry.getChild();
|
||||
}
|
||||
@ -629,8 +663,8 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected InternalTraceObjectValue put(Lifespan range, Object value) {
|
||||
InternalTraceObjectValue entry = super.put(range, value);
|
||||
protected DBTraceObjectValue put(Lifespan range, Object value) {
|
||||
DBTraceObjectValue entry = super.put(range, value);
|
||||
if (entry != null && entry.isCanonical()) {
|
||||
canonicalLifeChanged = entry.getChild();
|
||||
}
|
||||
@ -638,11 +672,11 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected InternalTraceObjectValue create(Lifespan range, Object value) {
|
||||
protected DBTraceObjectValue create(Lifespan range, Object value) {
|
||||
return doCreateValue(range, k, value);
|
||||
}
|
||||
};
|
||||
InternalTraceObjectValue result = setter.set(lifespan, value);
|
||||
DBTraceObjectValue result = setter.set(lifespan, value);
|
||||
|
||||
DBTraceObject child = setter.canonicalLifeChanged;
|
||||
if (child != null) {
|
||||
@ -767,10 +801,10 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
|
||||
protected void doDeleteReferringValues() {
|
||||
for (InternalTraceObjectValue child : getValues(Lifespan.ALL)) {
|
||||
for (DBTraceObjectValue child : getValues(Lifespan.ALL)) {
|
||||
child.doDeleteAndEmit();
|
||||
}
|
||||
for (InternalTraceObjectValue parent : getParents(Lifespan.ALL)) {
|
||||
for (DBTraceObjectValue parent : getParents(Lifespan.ALL)) {
|
||||
parent.doDeleteAndEmit();
|
||||
}
|
||||
}
|
||||
@ -800,21 +834,16 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
}
|
||||
|
||||
protected void notifyValueCreated(InternalTraceObjectValue value) {
|
||||
if (cachedLifespanValues != null) {
|
||||
if (cachedLifespanValues.span.intersects(value.getLifespan())) {
|
||||
cachedLifespanValues.values.add(value);
|
||||
}
|
||||
}
|
||||
protected void notifyValueCreated(DBTraceObjectValue value) {
|
||||
cache.notifyValueCreated(value);
|
||||
}
|
||||
|
||||
protected void notifyValueDeleted(InternalTraceObjectValue value) {
|
||||
if (cachedLifespanValues != null) {
|
||||
cachedLifespanValues.values.remove(value);
|
||||
}
|
||||
protected void notifyValueDeleted(DBTraceObjectValue value) {
|
||||
cache.notifyValueDeleted(value);
|
||||
}
|
||||
|
||||
protected void notifyParentValueCreated(InternalTraceObjectValue parent) {
|
||||
protected void notifyParentValueCreated(DBTraceObjectValue parent) {
|
||||
Objects.requireNonNull(parent);
|
||||
if (cachedLife != null && parent.isCanonical()) {
|
||||
synchronized (cachedLife) {
|
||||
cachedLife.add(parent.getLifespan());
|
||||
@ -822,7 +851,8 @@ public class DBTraceObject extends DBAnnotatedObject implements TraceObject {
|
||||
}
|
||||
}
|
||||
|
||||
protected void notifyParentValueDeleted(InternalTraceObjectValue parent) {
|
||||
protected void notifyParentValueDeleted(DBTraceObjectValue parent) {
|
||||
Objects.requireNonNull(parent);
|
||||
if (cachedLife != null && parent.isCanonical()) {
|
||||
synchronized (cachedLife) {
|
||||
cachedLife.remove(parent.getLifespan());
|
||||
|
@ -22,7 +22,7 @@ import db.LongField;
|
||||
import ghidra.util.database.DBAnnotatedObject;
|
||||
import ghidra.util.database.DBCachedObjectStoreFactory.AbstractDBFieldCodec;
|
||||
|
||||
public class DBTraceObjectDBFieldCodec<OV extends DBAnnotatedObject & InternalTraceObjectValue>
|
||||
public class DBTraceObjectDBFieldCodec<OV extends DBAnnotatedObject & TraceObjectValueStorage>
|
||||
extends AbstractDBFieldCodec<DBTraceObject, OV, LongField> {
|
||||
public DBTraceObjectDBFieldCodec(Class<OV> objectType, Field field, int column) {
|
||||
super(DBTraceObject.class, objectType, LongField.class, field, column);
|
||||
@ -32,7 +32,7 @@ public class DBTraceObjectDBFieldCodec<OV extends DBAnnotatedObject & InternalTr
|
||||
return value == null ? -1 : value.getKey();
|
||||
}
|
||||
|
||||
protected static DBTraceObject decode(InternalTraceObjectValue ent, long enc) {
|
||||
protected static DBTraceObject decode(TraceObjectValueStorage ent, long enc) {
|
||||
return enc == -1 ? null : ent.getManager().getObjectById(enc);
|
||||
}
|
||||
|
||||
|
@ -31,7 +31,6 @@ import ghidra.dbg.target.TargetObject;
|
||||
import ghidra.dbg.target.schema.*;
|
||||
import ghidra.dbg.target.schema.TargetObjectSchema.SchemaName;
|
||||
import ghidra.dbg.util.*;
|
||||
import ghidra.lifecycle.Internal;
|
||||
import ghidra.program.model.address.*;
|
||||
import ghidra.program.model.lang.Language;
|
||||
import ghidra.trace.database.DBTrace;
|
||||
@ -57,8 +56,7 @@ import ghidra.trace.model.thread.TraceObjectThread;
|
||||
import ghidra.trace.model.thread.TraceThread;
|
||||
import ghidra.trace.util.TraceChangeRecord;
|
||||
import ghidra.trace.util.TraceEvents;
|
||||
import ghidra.util.LockHold;
|
||||
import ghidra.util.Msg;
|
||||
import ghidra.util.*;
|
||||
import ghidra.util.database.*;
|
||||
import ghidra.util.database.DBCachedObjectStoreFactory.AbstractDBFieldCodec;
|
||||
import ghidra.util.database.DBCachedObjectStoreFactory.PrimitiveCodec;
|
||||
@ -159,11 +157,11 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
protected final DBCachedObjectStore<DBTraceObject> objectStore;
|
||||
protected final DBTraceObjectValueRStarTree valueTree;
|
||||
protected final DBTraceObjectValueMap valueMap;
|
||||
protected final DBTraceObjectValueWriteBehindCache valueWbCache;
|
||||
|
||||
protected final DBCachedObjectIndex<TraceObjectKeyPath, DBTraceObject> objectsByPath;
|
||||
|
||||
protected final Collection<TraceObject> objectsView;
|
||||
protected final Collection<TraceObjectValue> valuesView;
|
||||
|
||||
protected TargetObjectSchema rootSchema;
|
||||
|
||||
@ -198,8 +196,9 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
objectsByPath =
|
||||
objectStore.getIndex(TraceObjectKeyPath.class, DBTraceObject.PATH_COLUMN);
|
||||
|
||||
valueWbCache = new DBTraceObjectValueWriteBehindCache(this);
|
||||
|
||||
objectsView = Collections.unmodifiableCollection(objectStore.asMap().values());
|
||||
valuesView = Collections.unmodifiableCollection(valueMap.values());
|
||||
}
|
||||
|
||||
protected void loadRootSchema() {
|
||||
@ -228,29 +227,42 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
schemasByInterface.clear();
|
||||
}
|
||||
|
||||
@Internal
|
||||
protected boolean checkMyObject(DBTraceObject object) {
|
||||
if (object.manager != this) {
|
||||
return false;
|
||||
}
|
||||
if (!objectStore.asMap().values().contains(object)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
protected DBTraceObject assertIsMine(TraceObject object) {
|
||||
if (!(object instanceof DBTraceObject)) {
|
||||
if (!(object instanceof DBTraceObject dbObject)) {
|
||||
throw new IllegalArgumentException("Object " + object + " is not part of this trace");
|
||||
}
|
||||
DBTraceObject dbObject = (DBTraceObject) object;
|
||||
if (dbObject.manager != this) {
|
||||
throw new IllegalArgumentException("Object " + object + " is not part of this trace");
|
||||
}
|
||||
if (!getAllObjects().contains(dbObject)) {
|
||||
if (!checkMyObject(dbObject)) {
|
||||
throw new IllegalArgumentException("Object " + object + " is not part of this trace");
|
||||
}
|
||||
return dbObject;
|
||||
}
|
||||
|
||||
protected Object validatePrimitive(Object child) {
|
||||
protected Object validatePrimitive(Object value) {
|
||||
try {
|
||||
PrimitiveCodec.getCodec(child.getClass());
|
||||
PrimitiveCodec.getCodec(value.getClass());
|
||||
}
|
||||
catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("Cannot encode " + child, e);
|
||||
throw new IllegalArgumentException("Cannot encode " + value, e);
|
||||
}
|
||||
return child;
|
||||
return value;
|
||||
}
|
||||
|
||||
protected Object validateValue(Object value) {
|
||||
if (value instanceof TraceObject | value instanceof Address |
|
||||
value instanceof AddressRange) {
|
||||
return value;
|
||||
}
|
||||
return validatePrimitive(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -267,7 +279,7 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
rootSchema = schema;
|
||||
}
|
||||
|
||||
protected void emitValueCreated(DBTraceObject parent, InternalTraceObjectValue entry) {
|
||||
protected void emitValueCreated(DBTraceObject parent, DBTraceObjectValue entry) {
|
||||
if (parent == null) {
|
||||
// Don't need event for root value created
|
||||
return;
|
||||
@ -275,24 +287,30 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
parent.emitEvents(new TraceChangeRecord<>(TraceEvents.VALUE_CREATED, null, entry));
|
||||
}
|
||||
|
||||
protected InternalTraceObjectValue doCreateValue(Lifespan lifespan,
|
||||
protected DBTraceObjectValueData doCreateValueData(Lifespan lifespan, DBTraceObject parent,
|
||||
String key, Object value) {
|
||||
DBTraceObjectValueData entry =
|
||||
valueMap.put(new ImmutableValueShape(parent, value, key, lifespan), null);
|
||||
if (!(value instanceof DBTraceObject)) {
|
||||
entry.doSetPrimitive(value);
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
|
||||
protected DBTraceObjectValue doCreateValue(Lifespan lifespan,
|
||||
DBTraceObject parent, String key, Object value) {
|
||||
InternalTraceObjectValue entry = valueTree.asSpatialMap()
|
||||
.put(new ImmutableValueShape(parent, value, key, lifespan), null);
|
||||
// Root is never in write-behind cache
|
||||
DBTraceObjectValue entry = parent == null
|
||||
? doCreateValueData(lifespan, parent, key, value).getWrapper()
|
||||
: valueWbCache.doCreateValue(lifespan, parent, key, value).getWrapper();
|
||||
if (parent != null) {
|
||||
parent.notifyValueCreated(entry);
|
||||
}
|
||||
if (value instanceof DBTraceObject child) {
|
||||
child.notifyParentValueCreated(entry);
|
||||
}
|
||||
else {
|
||||
entry.doSetPrimitive(value);
|
||||
}
|
||||
|
||||
if (parent != null) { // Root
|
||||
parent.notifyValueCreated(entry);
|
||||
}
|
||||
|
||||
// TODO: Perhaps a little drastic
|
||||
invalidateObjectsContainingCache();
|
||||
|
||||
emitValueCreated(parent, entry);
|
||||
return entry;
|
||||
}
|
||||
@ -326,13 +344,13 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
}
|
||||
|
||||
@Override
|
||||
public TraceObjectValue createRootObject(TargetObjectSchema schema) {
|
||||
public DBTraceObjectValue createRootObject(TargetObjectSchema schema) {
|
||||
try (LockHold hold = trace.lockWrite()) {
|
||||
setSchema(schema);
|
||||
DBTraceObject root = doCreateObject(TraceObjectKeyPath.of());
|
||||
assert root.getKey() == 0;
|
||||
InternalTraceObjectValue val = doCreateValue(Lifespan.ALL, null, "", root);
|
||||
assert val.getKey() == 0;
|
||||
DBTraceObjectValue val = doCreateValue(Lifespan.ALL, null, "", root);
|
||||
assert val.getWrapped() instanceof DBTraceObjectValueData data && data.getKey() == 0;
|
||||
return val;
|
||||
}
|
||||
}
|
||||
@ -344,9 +362,10 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
}
|
||||
}
|
||||
|
||||
public DBTraceObjectValueData getRootValue() {
|
||||
public DBTraceObjectValue getRootValue() {
|
||||
try (LockHold hold = trace.lockRead()) {
|
||||
return valueTree.getDataStore().getObjectAt(0);
|
||||
DBTraceObjectValueData data = valueTree.getDataStore().getObjectAt(0);
|
||||
return data == null ? null : data.getWrapper();
|
||||
}
|
||||
}
|
||||
|
||||
@ -381,7 +400,7 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
public Stream<? extends TraceObjectValPath> getValuePaths(Lifespan span,
|
||||
PathPredicates predicates) {
|
||||
try (LockHold hold = trace.lockRead()) {
|
||||
DBTraceObjectValueData rootVal = getRootValue();
|
||||
DBTraceObjectValue rootVal = getRootValue();
|
||||
if (rootVal == null) {
|
||||
return Stream.of();
|
||||
}
|
||||
@ -390,29 +409,60 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<? extends TraceObject> getAllObjects() {
|
||||
return objectsView;
|
||||
public Stream<DBTraceObject> getAllObjects() {
|
||||
return objectStore.asMap().values().stream();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<? extends TraceObjectValue> getAllValues() {
|
||||
return valuesView;
|
||||
public int getObjectCount() {
|
||||
return objectStore.getRecordCount();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<DBTraceObjectValue> getAllValues() {
|
||||
return Stream.concat(
|
||||
valueMap.values().stream().map(v -> v.getWrapper()),
|
||||
valueWbCache.streamAllValues().map(v -> v.getWrapper()));
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValueData> streamValuesIntersectingData(Lifespan span,
|
||||
AddressRange range, String entryKey) {
|
||||
return valueMap.reduce(TraceObjectValueQuery.intersecting(
|
||||
entryKey != null ? entryKey : EntryKeyDimension.INSTANCE.absoluteMin(),
|
||||
entryKey != null ? entryKey : EntryKeyDimension.INSTANCE.absoluteMax(),
|
||||
span, range)).values().stream();
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValueBehind> streamValuesIntersectingBehind(Lifespan span,
|
||||
AddressRange range, String entryKey) {
|
||||
return valueWbCache.streamValuesIntersecting(span, range, entryKey);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<? extends TraceObjectValue> getValuesIntersecting(Lifespan span,
|
||||
AddressRange range, String entryKey) {
|
||||
return Collections
|
||||
.unmodifiableCollection(valueMap.reduce(TraceObjectValueQuery.intersecting(
|
||||
entryKey != null ? entryKey : EntryKeyDimension.INSTANCE.absoluteMin(),
|
||||
entryKey != null ? entryKey : EntryKeyDimension.INSTANCE.absoluteMax(),
|
||||
span, range)).values());
|
||||
return Stream.concat(
|
||||
streamValuesIntersectingData(span, range, entryKey).map(v -> v.getWrapper()),
|
||||
streamValuesIntersectingBehind(span, range, entryKey).map(v -> v.getWrapper()))
|
||||
.toList();
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValueData> streamValuesAtData(long snap, Address address,
|
||||
String entryKey) {
|
||||
return valueMap.reduce(TraceObjectValueQuery.at(entryKey, snap, address)).values().stream();
|
||||
}
|
||||
|
||||
protected Stream<DBTraceObjectValueBehind> streamValuesAtBehind(long snap, Address address,
|
||||
String entryKey) {
|
||||
return valueWbCache.streamValuesAt(snap, address, entryKey);
|
||||
}
|
||||
|
||||
public Collection<? extends TraceObjectValue> getValuesAt(long snap, Address address,
|
||||
String entryKey) {
|
||||
return Collections.unmodifiableCollection(
|
||||
valueMap.reduce(TraceObjectValueQuery.at(entryKey, snap, address)).values());
|
||||
return Stream.concat(
|
||||
streamValuesAtData(snap, address, entryKey).map(v -> v.getWrapper()),
|
||||
streamValuesAtBehind(snap, address, entryKey).map(v -> v.getWrapper()))
|
||||
.toList();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -450,6 +500,7 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
public void clear() {
|
||||
try (LockHold hold = trace.lockWrite()) {
|
||||
valueMap.clear();
|
||||
valueWbCache.clear();
|
||||
objectStore.deleteAll();
|
||||
schemaStore.deleteAll();
|
||||
rootSchema = null;
|
||||
@ -463,8 +514,8 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
object.emitEvents(new TraceChangeRecord<>(TraceEvents.OBJECT_DELETED, null, object));
|
||||
}
|
||||
|
||||
protected void doDeleteEdge(DBTraceObjectValueData edge) {
|
||||
valueTree.doDeleteEntry(edge);
|
||||
protected void doDeleteValue(DBTraceObjectValueData value) {
|
||||
valueTree.doDeleteEntry(value);
|
||||
|
||||
// TODO: Perhaps a little drastic....
|
||||
/**
|
||||
@ -475,6 +526,12 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
invalidateObjectsContainingCache();
|
||||
}
|
||||
|
||||
protected void doDeleteCachedValue(DBTraceObjectValueBehind value) {
|
||||
valueWbCache.remove(value);
|
||||
// Ditto NB from doDeleteValue
|
||||
invalidateObjectsContainingCache();
|
||||
}
|
||||
|
||||
public boolean hasSchema() {
|
||||
return rootSchema != null;
|
||||
}
|
||||
@ -594,22 +651,28 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
}
|
||||
}
|
||||
|
||||
static <I extends TraceObjectInterface> boolean acceptValue(DBTraceObjectValue value,
|
||||
String key, Class<I> ifaceCls, Predicate<? super I> predicate) {
|
||||
if (!value.hasEntryKey(key)) {
|
||||
return false;
|
||||
}
|
||||
TraceObject parent = value.getParent();
|
||||
I iface = parent.queryInterface(ifaceCls);
|
||||
if (iface == null) {
|
||||
return false;
|
||||
}
|
||||
if (!predicate.test(iface)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public <I extends TraceObjectInterface> AddressSetView getObjectsAddressSet(long snap,
|
||||
String key, Class<I> ifaceCls, Predicate<? super I> predicate) {
|
||||
return valueMap.getAddressSetView(Lifespan.at(snap), v -> {
|
||||
if (!v.hasEntryKey(key)) {
|
||||
return false;
|
||||
}
|
||||
TraceObject parent = v.getParent();
|
||||
I iface = parent.queryInterface(ifaceCls);
|
||||
if (iface == null) {
|
||||
return false;
|
||||
}
|
||||
if (!predicate.test(iface)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
return new UnionAddressSetView(
|
||||
valueMap.getAddressSetView(Lifespan.at(snap),
|
||||
v -> acceptValue(v.getWrapper(), key, ifaceCls, predicate)),
|
||||
valueWbCache.getObjectsAddresSet(snap, key, ifaceCls, predicate));
|
||||
}
|
||||
|
||||
public <I extends TraceObjectInterface> I getSuccessor(TraceObject seed,
|
||||
@ -751,24 +814,21 @@ public class DBTraceObjectManager implements TraceObjectManager, DBTraceManager
|
||||
}
|
||||
}
|
||||
|
||||
public boolean checkMyObject(DBTraceObject object) {
|
||||
if (object.manager != this) {
|
||||
return false;
|
||||
}
|
||||
if (!getAllObjects().contains(object)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public TraceThread assertMyThread(TraceThread thread) {
|
||||
if (!(thread instanceof DBTraceObjectThread)) {
|
||||
if (!(thread instanceof DBTraceObjectThread dbThread)) {
|
||||
throw new AssertionError("Thread " + thread + " is not an object in this trace");
|
||||
}
|
||||
DBTraceObjectThread dbThread = (DBTraceObjectThread) thread;
|
||||
if (!checkMyObject(dbThread.getObject())) {
|
||||
throw new AssertionError("Thread " + thread + " is not an object in this trace");
|
||||
}
|
||||
return dbThread;
|
||||
}
|
||||
|
||||
public void flushWbCaches() {
|
||||
valueWbCache.flush();
|
||||
}
|
||||
|
||||
public void waitWbWorkers() {
|
||||
valueWbCache.waitWorkers();
|
||||
}
|
||||
}
|
||||
|
@ -28,18 +28,18 @@ public class DBTraceObjectValPath implements TraceObjectValPath {
|
||||
return EMPTY;
|
||||
}
|
||||
|
||||
public static DBTraceObjectValPath of(Collection<InternalTraceObjectValue> entryList) {
|
||||
public static DBTraceObjectValPath of(Collection<DBTraceObjectValue> entryList) {
|
||||
return new DBTraceObjectValPath(List.copyOf(entryList));
|
||||
}
|
||||
|
||||
public static DBTraceObjectValPath of(InternalTraceObjectValue... entries) {
|
||||
public static DBTraceObjectValPath of(DBTraceObjectValue... entries) {
|
||||
return DBTraceObjectValPath.of(Arrays.asList(entries));
|
||||
}
|
||||
|
||||
private final List<InternalTraceObjectValue> entryList;
|
||||
private final List<DBTraceObjectValue> entryList;
|
||||
private List<String> keyList; // lazily computed
|
||||
|
||||
private DBTraceObjectValPath(List<InternalTraceObjectValue> entryList) {
|
||||
private DBTraceObjectValPath(List<DBTraceObjectValue> entryList) {
|
||||
this.entryList = entryList;
|
||||
}
|
||||
|
||||
@ -49,7 +49,7 @@ public class DBTraceObjectValPath implements TraceObjectValPath {
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<? extends InternalTraceObjectValue> getEntryList() {
|
||||
public List<DBTraceObjectValue> getEntryList() {
|
||||
return entryList;
|
||||
}
|
||||
|
||||
@ -77,10 +77,10 @@ public class DBTraceObjectValPath implements TraceObjectValPath {
|
||||
if (!entryList.isEmpty() && entry.getTrace() != entryList.get(0).getTrace()) {
|
||||
throw new IllegalArgumentException("All values in path must be from the same trace");
|
||||
}
|
||||
if (!(entry instanceof InternalTraceObjectValue val)) {
|
||||
if (!(entry instanceof DBTraceObjectValue val)) {
|
||||
throw new IllegalArgumentException("Value must be in the database");
|
||||
}
|
||||
InternalTraceObjectValue[] arr = new InternalTraceObjectValue[1 + entryList.size()];
|
||||
DBTraceObjectValue[] arr = new DBTraceObjectValue[1 + entryList.size()];
|
||||
arr[0] = val;
|
||||
for (int i = 1; i < arr.length; i++) {
|
||||
arr[i] = entryList.get(i - 1);
|
||||
@ -93,10 +93,10 @@ public class DBTraceObjectValPath implements TraceObjectValPath {
|
||||
if (!entryList.isEmpty() && entry.getTrace() != entryList.get(0).getTrace()) {
|
||||
throw new IllegalArgumentException("All values in path must be from the same trace");
|
||||
}
|
||||
if (!(entry instanceof InternalTraceObjectValue val)) {
|
||||
if (!(entry instanceof DBTraceObjectValue val)) {
|
||||
throw new IllegalArgumentException("Value must be in the database");
|
||||
}
|
||||
InternalTraceObjectValue[] arr = new InternalTraceObjectValue[1 + entryList.size()];
|
||||
DBTraceObjectValue[] arr = new DBTraceObjectValue[1 + entryList.size()];
|
||||
for (int i = 0; i < arr.length - 1; i++) {
|
||||
arr[i] = entryList.get(i);
|
||||
}
|
||||
@ -105,7 +105,7 @@ public class DBTraceObjectValPath implements TraceObjectValPath {
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalTraceObjectValue getFirstEntry() {
|
||||
public DBTraceObjectValue getFirstEntry() {
|
||||
if (entryList.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
@ -114,12 +114,12 @@ public class DBTraceObjectValPath implements TraceObjectValPath {
|
||||
|
||||
@Override
|
||||
public TraceObject getSource(TraceObject ifEmpty) {
|
||||
InternalTraceObjectValue first = getFirstEntry();
|
||||
DBTraceObjectValue first = getFirstEntry();
|
||||
return first == null ? ifEmpty : first.getParent();
|
||||
}
|
||||
|
||||
@Override
|
||||
public InternalTraceObjectValue getLastEntry() {
|
||||
public DBTraceObjectValue getLastEntry() {
|
||||
if (entryList.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
@ -128,13 +128,13 @@ public class DBTraceObjectValPath implements TraceObjectValPath {
|
||||
|
||||
@Override
|
||||
public Object getDestinationValue(Object ifEmpty) {
|
||||
InternalTraceObjectValue last = getLastEntry();
|
||||
DBTraceObjectValue last = getLastEntry();
|
||||
return last == null ? ifEmpty : last.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TraceObject getDestination(TraceObject ifEmpty) {
|
||||
InternalTraceObjectValue last = getLastEntry();
|
||||
DBTraceObjectValue last = getLastEntry();
|
||||
return last == null ? ifEmpty : last.getChild();
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,400 @@
|
||||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.trace.database.target;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import ghidra.trace.database.DBTraceUtils.LifespanMapSetter;
|
||||
import ghidra.trace.database.target.visitors.TreeTraversal;
|
||||
import ghidra.trace.database.target.visitors.TreeTraversal.Visitor;
|
||||
import ghidra.trace.model.Lifespan;
|
||||
import ghidra.trace.model.Trace;
|
||||
import ghidra.trace.model.target.*;
|
||||
import ghidra.trace.model.target.TraceObject.ConflictResolution;
|
||||
import ghidra.trace.util.TraceChangeRecord;
|
||||
import ghidra.trace.util.TraceEvents;
|
||||
import ghidra.util.LockHold;
|
||||
import ghidra.util.StreamUtils;
|
||||
|
||||
public class DBTraceObjectValue implements TraceObjectValue {
|
||||
|
||||
static abstract class ValueLifespanSetter
|
||||
extends LifespanMapSetter<DBTraceObjectValue, Object> {
|
||||
protected final Lifespan range;
|
||||
protected final Object value;
|
||||
protected DBTraceObjectValue keep = null;
|
||||
protected Collection<DBTraceObjectValue> kept = new ArrayList<>(2);
|
||||
|
||||
public ValueLifespanSetter(Lifespan range, Object value) {
|
||||
this.range = range;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public ValueLifespanSetter(Lifespan range, Object value,
|
||||
DBTraceObjectValue keep) {
|
||||
this(range, value);
|
||||
this.keep = keep;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Lifespan getRange(DBTraceObjectValue entry) {
|
||||
return entry.getLifespan();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Object getValue(DBTraceObjectValue entry) {
|
||||
return entry.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean valuesEqual(Object v1, Object v2) {
|
||||
if (Objects.equals(v1, v2)) {
|
||||
return true;
|
||||
}
|
||||
if (v1 == null || !v1.getClass().isArray()) {
|
||||
return false;
|
||||
}
|
||||
if (v1 instanceof boolean[] a1 && v2 instanceof boolean[] a2) {
|
||||
return Arrays.equals(a1, a2);
|
||||
}
|
||||
if (v1 instanceof byte[] a1 && v2 instanceof byte[] a2) {
|
||||
return Arrays.equals(a1, a2);
|
||||
}
|
||||
if (v1 instanceof char[] a1 && v2 instanceof char[] a2) {
|
||||
return Arrays.equals(a1, a2);
|
||||
}
|
||||
if (v1 instanceof double[] a1 && v2 instanceof double[] a2) {
|
||||
return Arrays.equals(a1, a2);
|
||||
}
|
||||
if (v1 instanceof float[] a1 && v2 instanceof float[] a2) {
|
||||
return Arrays.equals(a1, a2);
|
||||
}
|
||||
if (v1 instanceof int[] a1 && v2 instanceof int[] a2) {
|
||||
return Arrays.equals(a1, a2);
|
||||
}
|
||||
if (v1 instanceof long[] a1 && v2 instanceof long[] a2) {
|
||||
return Arrays.equals(a1, a2);
|
||||
}
|
||||
if (v1 instanceof short[] a1 && v2 instanceof short[] a2) {
|
||||
return Arrays.equals(a1, a2);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void remove(DBTraceObjectValue entry) {
|
||||
if (valuesEqual(entry.getValue(), value)) {
|
||||
if (keep == null) {
|
||||
keep = entry;
|
||||
}
|
||||
else {
|
||||
entry.doDeleteAndEmit();
|
||||
}
|
||||
}
|
||||
else {
|
||||
DBTraceObjectValue created = entry.doTruncateOrDelete(range);
|
||||
if (!entry.isDeleted()) {
|
||||
kept.add(entry);
|
||||
}
|
||||
if (created != null) {
|
||||
kept.add(created);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DBTraceObjectValue put(Lifespan range, Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
if (keep != null && valuesEqual(this.value, value)) {
|
||||
keep.doSetLifespanAndEmit(range);
|
||||
return keep;
|
||||
}
|
||||
for (DBTraceObjectValue k : kept) {
|
||||
if (valuesEqual(value, k.getValue()) && Objects.equals(range, k.getLifespan())) {
|
||||
kept.remove(k);
|
||||
return k;
|
||||
}
|
||||
}
|
||||
return create(range, value);
|
||||
}
|
||||
|
||||
protected abstract DBTraceObjectValue create(Lifespan range, Object value);
|
||||
}
|
||||
|
||||
private final DBTraceObjectManager manager;
|
||||
|
||||
private volatile TraceObjectValueStorage wrapped;
|
||||
|
||||
public DBTraceObjectValue(DBTraceObjectManager manager,
|
||||
TraceObjectValueStorage wrapped) {
|
||||
this.manager = manager;
|
||||
this.wrapped = wrapped;
|
||||
}
|
||||
|
||||
void setWrapped(TraceObjectValueStorage wrapped) {
|
||||
this.wrapped = wrapped;
|
||||
if (wrapped instanceof DBTraceObjectValueData data) {
|
||||
data.setWrapper(this);
|
||||
}
|
||||
}
|
||||
|
||||
void doSetLifespanAndEmit(Lifespan lifespan) {
|
||||
Lifespan oldLifespan = getLifespan();
|
||||
doSetLifespan(lifespan);
|
||||
getParent().emitEvents(new TraceChangeRecord<>(TraceEvents.VALUE_LIFESPAN_CHANGED,
|
||||
null, this, oldLifespan, lifespan));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Trace getTrace() {
|
||||
return manager.trace;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getEntryKey() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return wrapped.getEntryKey();
|
||||
}
|
||||
}
|
||||
|
||||
protected TraceObjectKeyPath doGetCanonicalPath() {
|
||||
DBTraceObject parent = wrapped.getParent();
|
||||
if (parent == null) {
|
||||
return TraceObjectKeyPath.of();
|
||||
}
|
||||
return parent.getCanonicalPath().extend(wrapped.getEntryKey());
|
||||
}
|
||||
|
||||
@Override
|
||||
public TraceObjectKeyPath getCanonicalPath() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return doGetCanonicalPath();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getValue() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return wrapped.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isObject() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return wrapped.getChildOrNull() != null;
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean doIsCanonical() {
|
||||
DBTraceObject child = wrapped.getChildOrNull();
|
||||
if (child == null) {
|
||||
return false;
|
||||
}
|
||||
if (wrapped.getParent() == null) { // We're the root
|
||||
return true;
|
||||
}
|
||||
return doGetCanonicalPath().equals(child.getCanonicalPath());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCanonical() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return doIsCanonical();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Lifespan getLifespan() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return wrapped.getLifespan();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMinSnap(long minSnap) {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
|
||||
setLifespan(Lifespan.span(minSnap, getLifespan().lmax()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMinSnap() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return wrapped.getLifespan().lmin();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMaxSnap(long maxSnap) {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
|
||||
setLifespan(Lifespan.span(getLifespan().lmin(), maxSnap));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMaxSnap() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return wrapped.getLifespan().lmax();
|
||||
}
|
||||
}
|
||||
|
||||
void doDelete() {
|
||||
getParent().notifyValueDeleted(this);
|
||||
DBTraceObject child = wrapped.getChildOrNull();
|
||||
if (child != null) {
|
||||
child.notifyParentValueDeleted(this);
|
||||
}
|
||||
wrapped.doDelete();
|
||||
}
|
||||
|
||||
void doDeleteAndEmit() {
|
||||
DBTraceObject parent = getParent();
|
||||
doDelete();
|
||||
parent.emitEvents(new TraceChangeRecord<>(TraceEvents.VALUE_DELETED, null, this));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
|
||||
if (getParent() == null) {
|
||||
throw new IllegalArgumentException("Cannot delete root value");
|
||||
}
|
||||
doDeleteAndEmit();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDeleted() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return wrapped.isDeleted();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObjectValue truncateOrDelete(Lifespan span) {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
|
||||
if (wrapped.getParent() == null) {
|
||||
throw new IllegalArgumentException("Cannot truncate or delete root value");
|
||||
}
|
||||
return doTruncateOrDeleteAndEmitLifeChange(span);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObject getChild() {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return (DBTraceObject) wrapped.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLifespan(Lifespan lifespan) {
|
||||
setLifespan(lifespan, ConflictResolution.TRUNCATE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLifespan(Lifespan lifespan, ConflictResolution resolution) {
|
||||
try (LockHold hold = getTrace().lockWrite()) {
|
||||
if (getParent() == null) {
|
||||
throw new IllegalArgumentException("Cannot set lifespan of root value");
|
||||
}
|
||||
if (resolution == ConflictResolution.DENY) {
|
||||
getParent().doCheckConflicts(lifespan, getEntryKey(), getValue());
|
||||
}
|
||||
else if (resolution == ConflictResolution.ADJUST) {
|
||||
lifespan = getParent().doAdjust(lifespan, getEntryKey(), getValue());
|
||||
}
|
||||
new ValueLifespanSetter(lifespan, getValue(), this) {
|
||||
@Override
|
||||
protected Iterable<DBTraceObjectValue> getIntersecting(Long lower,
|
||||
Long upper) {
|
||||
return StreamUtils.iter(getParent().streamValuesR(
|
||||
Lifespan.span(lower, upper), getEntryKey(), true).filter(v -> v != keep));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DBTraceObjectValue create(Lifespan range, Object value) {
|
||||
return getParent().doCreateValue(range, getEntryKey(), value);
|
||||
}
|
||||
}.set(lifespan, getValue());
|
||||
if (isObject()) {
|
||||
DBTraceObject child = getChild();
|
||||
child.emitEvents(
|
||||
new TraceChangeRecord<>(TraceEvents.OBJECT_LIFE_CHANGED, null, child));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void doSetLifespan(Lifespan lifespan) {
|
||||
if (wrapped.getLifespan().equals(lifespan)) {
|
||||
return;
|
||||
}
|
||||
DBTraceObject parent = wrapped.getParent();
|
||||
DBTraceObject child = wrapped.getChildOrNull();
|
||||
parent.notifyValueDeleted(this);
|
||||
if (child != null) {
|
||||
child.notifyParentValueDeleted(this);
|
||||
}
|
||||
wrapped.doSetLifespan(lifespan);
|
||||
parent.notifyValueCreated(this);
|
||||
if (child != null) {
|
||||
child.notifyParentValueCreated(this);
|
||||
}
|
||||
}
|
||||
|
||||
DBTraceObjectValue doTruncateOrDeleteAndEmitLifeChange(Lifespan span) {
|
||||
if (!isCanonical()) {
|
||||
return doTruncateOrDelete(span);
|
||||
}
|
||||
DBTraceObject child = wrapped.getChildOrNull();
|
||||
DBTraceObjectValue result = doTruncateOrDelete(span);
|
||||
child.emitEvents(new TraceChangeRecord<>(TraceEvents.OBJECT_LIFE_CHANGED, null, child));
|
||||
return result;
|
||||
}
|
||||
|
||||
DBTraceObjectValue doTruncateOrDelete(Lifespan span) {
|
||||
List<Lifespan> removed = getLifespan().subtract(span);
|
||||
if (removed.isEmpty()) {
|
||||
doDeleteAndEmit();
|
||||
return null;
|
||||
}
|
||||
doSetLifespanAndEmit(removed.get(0));
|
||||
if (removed.size() == 2) {
|
||||
return getParent().doCreateValue(removed.get(1), getEntryKey(), getValue());
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObject getParent() {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return wrapped.getParent();
|
||||
}
|
||||
}
|
||||
|
||||
protected Stream<? extends TraceObjectValPath> doStreamVisitor(Lifespan span,
|
||||
Visitor visitor) {
|
||||
return TreeTraversal.INSTANCE.walkValue(visitor, this, span, null);
|
||||
}
|
||||
|
||||
public TraceObjectValueStorage getWrapped() {
|
||||
return wrapped;
|
||||
}
|
||||
}
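DBTraceObjectValue is now a thin, stable handle: its accessors take the manager's lock and defer to whichever TraceObjectValueStorage currently backs it, and setWrapped lets the write-behind machinery swap the in-memory storage for the database record without invalidating references clients already hold. A minimal sketch of that handle-and-storage split, using hypothetical Handle/Storage names rather than the real types:

// Hypothetical, simplified illustration of the wrapper/storage pattern above.
interface Storage {
	Object value();
}

final class Handle {
	private volatile Storage storage; // volatile, like the wrapped field above

	Handle(Storage initial) {
		this.storage = initial;
	}

	Object value() {
		return storage.value(); // clients only ever touch the handle
	}

	void swap(Storage next) {
		this.storage = next; // e.g., the in-memory entry replaced by its DB-backed record
	}
}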
|
@ -0,0 +1,101 @@
|
||||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.trace.database.target;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
import ghidra.trace.model.Lifespan;
|
||||
|
||||
public class DBTraceObjectValueBehind implements TraceObjectValueStorage {
|
||||
private final DBTraceObjectManager manager;
|
||||
|
||||
private final DBTraceObject parent;
|
||||
private final String entryKey;
|
||||
private Lifespan lifespan;
|
||||
private final Object value;
|
||||
|
||||
private boolean deleted = false;
|
||||
|
||||
private final DBTraceObjectValue wrapper;
|
||||
|
||||
public DBTraceObjectValueBehind(DBTraceObjectManager manager, DBTraceObject parent,
|
||||
String entryKey, Lifespan lifespan, Object value) {
|
||||
this.manager = manager;
|
||||
|
||||
this.parent = Objects.requireNonNull(parent, "Root cannot be delayed");
|
||||
this.entryKey = entryKey;
|
||||
this.lifespan = lifespan;
|
||||
this.value = value;
|
||||
|
||||
this.wrapper = new DBTraceObjectValue(manager, this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getEntryKey() {
|
||||
return entryKey;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Lifespan getLifespan() {
|
||||
return lifespan;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDeleted() {
|
||||
return deleted;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObjectManager getManager() {
|
||||
return manager;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObject getChildOrNull() {
|
||||
if (value instanceof DBTraceObject child) {
|
||||
return child;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doSetLifespan(Lifespan lifespan) {
|
||||
var values = manager.valueWbCache.doRemoveNoCleanup(this);
|
||||
this.lifespan = lifespan;
|
||||
manager.valueWbCache.doAddDirect(values, this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doDelete() {
|
||||
deleted = true;
|
||||
manager.doDeleteCachedValue(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObject getParent() {
|
||||
return parent;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObjectValue getWrapper() {
|
||||
return wrapper;
|
||||
}
|
||||
}
|
@ -17,17 +17,12 @@ package ghidra.trace.database.target;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import db.DBRecord;
|
||||
import ghidra.program.model.address.Address;
|
||||
import ghidra.program.model.address.AddressRange;
|
||||
import ghidra.trace.database.target.visitors.TreeTraversal;
|
||||
import ghidra.trace.database.target.visitors.TreeTraversal.Visitor;
|
||||
import ghidra.trace.model.Lifespan;
|
||||
import ghidra.trace.model.Trace;
|
||||
import ghidra.trace.model.target.*;
|
||||
import ghidra.util.LockHold;
|
||||
import ghidra.trace.model.target.TraceObject;
|
||||
import ghidra.util.database.DBCachedObjectStore;
|
||||
import ghidra.util.database.DBCachedObjectStoreFactory.*;
|
||||
import ghidra.util.database.DBObjectColumn;
|
||||
@ -36,8 +31,8 @@ import ghidra.util.database.spatial.DBTreeDataRecord;
|
||||
|
||||
@DBAnnotatedObjectInfo(version = 1)
|
||||
public class DBTraceObjectValueData
|
||||
extends DBTreeDataRecord<ValueShape, ValueBox, InternalTraceObjectValue>
|
||||
implements InternalTraceObjectValue, ValueShape {
|
||||
extends DBTreeDataRecord<ValueShape, ValueBox, DBTraceObjectValueData>
|
||||
implements TraceObjectValueStorage, ValueShape {
|
||||
static final String TABLE_NAME = "ObjectValue";
|
||||
|
||||
static final String PARENT_COLUMN_NAME = "Parent"; // R*-Tree parent
|
||||
@ -89,6 +84,8 @@ public class DBTraceObjectValueData
|
||||
protected Address address;
|
||||
protected AddressRange range;
|
||||
|
||||
private DBTraceObjectValue wrapper;
|
||||
|
||||
public DBTraceObjectValueData(DBTraceObjectManager manager, DBTraceObjectValueRStarTree tree,
|
||||
DBCachedObjectStore<?> store, DBRecord record) {
|
||||
super(store, record);
|
||||
@ -96,8 +93,7 @@ public class DBTraceObjectValueData
|
||||
this.tree = tree;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doSetPrimitive(Object primitive) {
|
||||
void doSetPrimitive(Object primitive) {
|
||||
if (primitive instanceof TraceObject) {
|
||||
throw new AssertionError();
|
||||
}
|
||||
@ -193,11 +189,6 @@ public class DBTraceObjectValueData
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Trace getTrace() {
|
||||
return manager.trace;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBTraceObject getParent() {
|
||||
return objParent;
|
||||
@ -208,51 +199,18 @@ public class DBTraceObjectValueData
|
||||
return entryKey;
|
||||
}
|
||||
|
||||
protected TraceObjectKeyPath doGetCanonicalPath() {
|
||||
if (objParent == null) {
|
||||
return TraceObjectKeyPath.of();
|
||||
}
|
||||
return objParent.getCanonicalPath().extend(entryKey);
|
||||
}
|
||||
|
||||
protected boolean doIsCanonical() {
|
||||
if (child == null) {
|
||||
return false;
|
||||
}
|
||||
if (objParent == null) { // We're the root
|
||||
return true;
|
||||
}
|
||||
return doGetCanonicalPath().equals(child.getCanonicalPath());
|
||||
}
|
||||
|
||||
@Override
|
||||
public TraceObjectKeyPath getCanonicalPath() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return doGetCanonicalPath();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCanonical() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.readLock())) {
|
||||
return doIsCanonical();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getValue() {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
if (child != null) {
|
||||
return child;
|
||||
}
|
||||
if (address != null) {
|
||||
return address;
|
||||
}
|
||||
if (range != null) {
|
||||
return range;
|
||||
}
|
||||
return child != null ? child : primitive;
|
||||
if (child != null) {
|
||||
return child;
|
||||
}
|
||||
if (address != null) {
|
||||
return address;
|
||||
}
|
||||
if (range != null) {
|
||||
return range;
|
||||
}
|
||||
return child != null ? child : primitive;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -260,64 +218,9 @@ public class DBTraceObjectValueData
|
||||
return (DBTraceObject) getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isObject() {
|
||||
return child != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Lifespan getLifespan() {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return lifespan;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMinSnap(long minSnap) {
|
||||
try (LockHold hold = manager.trace.lockWrite()) {
|
||||
setLifespan(Lifespan.span(minSnap, maxSnap));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMinSnap() {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return minSnap;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMaxSnap(long maxSnap) {
|
||||
try (LockHold hold = manager.trace.lockWrite()) {
|
||||
setLifespan(Lifespan.span(minSnap, maxSnap));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMaxSnap() {
|
||||
try (LockHold hold = manager.trace.lockRead()) {
|
||||
return maxSnap;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete() {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
|
||||
if (objParent == null) {
|
||||
throw new IllegalArgumentException("Cannot delete root value");
|
||||
}
|
||||
doDeleteAndEmit();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public TraceObjectValue truncateOrDelete(Lifespan span) {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
|
||||
if (objParent == null) {
|
||||
throw new IllegalArgumentException("Cannot truncate or delete root value");
|
||||
}
|
||||
return doTruncateOrDeleteAndEmitLifeChange(span);
|
||||
}
|
||||
return lifespan;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -335,12 +238,12 @@ public class DBTraceObjectValueData
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setRecordValue(InternalTraceObjectValue value) {
|
||||
protected void setRecordValue(DBTraceObjectValueData value) {
|
||||
// Nothing. Entry is the value
|
||||
}
|
||||
|
||||
@Override
|
||||
protected InternalTraceObjectValue getRecordValue() {
|
||||
protected DBTraceObjectValueData getRecordValue() {
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -396,38 +299,34 @@ public class DBTraceObjectValueData
|
||||
|
||||
@Override
|
||||
public void doSetLifespan(Lifespan lifespan) {
|
||||
if (minSnap == lifespan.lmin() && maxSnap == lifespan.lmax()) {
|
||||
return;
|
||||
}
|
||||
// NB. Wrapper would not call if lifespan weren't different
|
||||
DBTraceObjectValueRStarTree tree = this.tree;
|
||||
tree.doUnparentEntry(this);
|
||||
objParent.notifyValueDeleted(this);
|
||||
if (child != null) {
|
||||
child.notifyParentValueDeleted(this);
|
||||
}
|
||||
minSnap = lifespan.lmin();
|
||||
maxSnap = lifespan.lmax();
|
||||
update(MIN_SNAP_COLUMN, MAX_SNAP_COLUMN);
|
||||
this.lifespan = lifespan;
|
||||
updateBounds();
|
||||
tree.doInsertDataEntry(this);
|
||||
objParent.notifyValueCreated(this);
|
||||
if (child != null) {
|
||||
child.notifyParentValueCreated(this);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doDelete() {
|
||||
objParent.notifyValueDeleted(this);
|
||||
if (child != null) {
|
||||
child.notifyParentValueDeleted(this);
|
||||
}
|
||||
manager.doDeleteEdge(this);
|
||||
manager.doDeleteValue(this);
|
||||
}
|
||||
|
||||
protected Stream<? extends TraceObjectValPath> doStreamVisitor(Lifespan span,
|
||||
Visitor visitor) {
|
||||
return TreeTraversal.INSTANCE.walkValue(visitor, this, span, null);
|
||||
@Override
|
||||
public DBTraceObjectValue getWrapper() {
|
||||
if (wrapper == null) {
|
||||
wrapper = new DBTraceObjectValue(manager, this);
|
||||
}
|
||||
return wrapper;
|
||||
}
|
||||
|
||||
void setWrapper(DBTraceObjectValue wrapper) {
|
||||
if (this.wrapper != null) {
|
||||
throw new AssertionError();
|
||||
}
|
||||
this.wrapper = wrapper;
|
||||
}
|
||||
}
|
||||
|
@ -35,8 +35,8 @@ public class DBTraceObjectValueMapAddressSetView extends AbstractAddressSetView
|
||||
|
||||
private final AddressFactory factory;
|
||||
private final ReadWriteLock lock;
|
||||
private final SpatialMap<ValueShape, InternalTraceObjectValue, TraceObjectValueQuery> map;
|
||||
private final Predicate<? super InternalTraceObjectValue> predicate;
|
||||
private final SpatialMap<ValueShape, DBTraceObjectValueData, TraceObjectValueQuery> map;
|
||||
private final Predicate<? super DBTraceObjectValueData> predicate;
|
||||
|
||||
/**
|
||||
* An address set view that unions all addresses where an entry satisfying the given predicate
|
||||
@ -52,8 +52,8 @@ public class DBTraceObjectValueMapAddressSetView extends AbstractAddressSetView
|
||||
* @param predicate a predicate to further filter entries
|
||||
*/
|
||||
public DBTraceObjectValueMapAddressSetView(AddressFactory factory, ReadWriteLock lock,
|
||||
SpatialMap<ValueShape, InternalTraceObjectValue, TraceObjectValueQuery> map,
|
||||
Predicate<? super InternalTraceObjectValue> predicate) {
|
||||
SpatialMap<ValueShape, DBTraceObjectValueData, TraceObjectValueQuery> map,
|
||||
Predicate<? super DBTraceObjectValueData> predicate) {
|
||||
this.factory = factory;
|
||||
this.lock = lock;
|
||||
this.map = map;
|
||||
@ -63,7 +63,7 @@ public class DBTraceObjectValueMapAddressSetView extends AbstractAddressSetView
|
||||
@Override
|
||||
public boolean contains(Address addr) {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
for (InternalTraceObjectValue value : map
|
||||
for (DBTraceObjectValueData value : map
|
||||
.reduce(TraceObjectValueQuery.intersecting(Lifespan.ALL,
|
||||
new AddressRangeImpl(addr, addr)))
|
||||
.values()) {
|
||||
@ -95,7 +95,7 @@ public class DBTraceObjectValueMapAddressSetView extends AbstractAddressSetView
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
for (InternalTraceObjectValue value : map.values()) {
|
||||
for (DBTraceObjectValueData value : map.values()) {
|
||||
if (predicate.test(value)) {
|
||||
return false;
|
||||
}
|
||||
@ -107,7 +107,7 @@ public class DBTraceObjectValueMapAddressSetView extends AbstractAddressSetView
|
||||
@Override
|
||||
public Address getMinAddress() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
for (Entry<ValueShape, InternalTraceObjectValue> entry : map
|
||||
for (Entry<ValueShape, DBTraceObjectValueData> entry : map
|
||||
.reduce(TraceObjectValueQuery.all().starting(AddressDimension.FORWARD))
|
||||
.orderedEntries()) {
|
||||
if (predicate.test(entry.getValue())) {
|
||||
@ -121,7 +121,7 @@ public class DBTraceObjectValueMapAddressSetView extends AbstractAddressSetView
|
||||
@Override
|
||||
public Address getMaxAddress() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
for (Entry<ValueShape, InternalTraceObjectValue> entry : map
|
||||
for (Entry<ValueShape, DBTraceObjectValueData> entry : map
|
||||
.reduce(TraceObjectValueQuery.all().starting(AddressDimension.BACKWARD))
|
||||
.orderedEntries()) {
|
||||
if (predicate.test(entry.getValue())) {
|
||||
@ -153,14 +153,14 @@ public class DBTraceObjectValueMapAddressSetView extends AbstractAddressSetView
|
||||
|
||||
protected AddressRangeIterator doGetAddressRanges(RecAddress start, RecAddress end,
|
||||
boolean forward) {
|
||||
Iterator<Entry<ValueShape, InternalTraceObjectValue>> mapIt = map
|
||||
Iterator<Entry<ValueShape, DBTraceObjectValueData>> mapIt = map
|
||||
.reduce(TraceObjectValueQuery
|
||||
.intersecting(EntryKeyDimension.INSTANCE.absoluteMin(),
|
||||
EntryKeyDimension.INSTANCE.absoluteMax(), Lifespan.ALL, start, end)
|
||||
.starting(forward ? AddressDimension.FORWARD : AddressDimension.BACKWARD))
|
||||
.orderedEntries()
|
||||
.iterator();
|
||||
Iterator<Entry<ValueShape, InternalTraceObjectValue>> fltIt =
|
||||
Iterator<Entry<ValueShape, DBTraceObjectValueData>> fltIt =
|
||||
IteratorUtils.filteredIterator(mapIt, e -> predicate.test(e.getValue()));
|
||||
Iterator<AddressRange> rawIt =
|
||||
IteratorUtils.transformedIterator(fltIt, e -> e.getKey().getRange(factory));
|
||||
|
@ -36,16 +36,16 @@ public class DBTraceObjectValueRStarTree extends AbstractHyperRStarTree< //
|
||||
ValueTriple, //
|
||||
ValueShape, DBTraceObjectValueData, //
|
||||
ValueBox, DBTraceObjectValueNode, //
|
||||
InternalTraceObjectValue, TraceObjectValueQuery> {
|
||||
DBTraceObjectValueData, TraceObjectValueQuery> {
|
||||
|
||||
public static class DBTraceObjectValueMap extends AsSpatialMap<ValueShape, //
|
||||
DBTraceObjectValueData, ValueBox, InternalTraceObjectValue, TraceObjectValueQuery> {
|
||||
DBTraceObjectValueData, ValueBox, DBTraceObjectValueData, TraceObjectValueQuery> {
|
||||
|
||||
private final AddressFactory factory;
|
||||
private final ReadWriteLock lock;
|
||||
|
||||
public DBTraceObjectValueMap(AbstractConstraintsTree<ValueShape, DBTraceObjectValueData, //
|
||||
ValueBox, ?, InternalTraceObjectValue, TraceObjectValueQuery> tree,
|
||||
ValueBox, ?, DBTraceObjectValueData, TraceObjectValueQuery> tree,
|
||||
TraceObjectValueQuery query, AddressFactory factory, ReadWriteLock lock) {
|
||||
super(tree, query);
|
||||
this.factory = factory;
|
||||
@ -59,7 +59,7 @@ public class DBTraceObjectValueRStarTree extends AbstractHyperRStarTree< //
|
||||
}
|
||||
|
||||
public AddressSetView getAddressSetView(Lifespan at,
|
||||
Predicate<? super InternalTraceObjectValue> predicate) {
|
||||
Predicate<? super DBTraceObjectValueData> predicate) {
|
||||
return new DBTraceObjectValueMapAddressSetView(factory, lock,
|
||||
this.reduce(TraceObjectValueQuery.intersecting(
|
||||
EntryKeyDimension.INSTANCE.absoluteMin(),
|
||||
|
@ -0,0 +1,380 @@
|
||||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.trace.database.target;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import db.Transaction;
|
||||
import ghidra.async.AsyncReference;
|
||||
import ghidra.program.model.address.*;
|
||||
import ghidra.trace.model.Lifespan;
|
||||
import ghidra.trace.model.target.TraceObjectInterface;
|
||||
import ghidra.trace.model.target.TraceObjectKeyPath;
|
||||
import ghidra.util.*;
|
||||
|
||||
class DBTraceObjectValueWriteBehindCache {
|
||||
public static final int INITIAL_CACHE_SIZE = 1000;
|
||||
public static final int BATCH_SIZE = 100;
|
||||
public static final int DELAY_MS = 10000;
|
||||
|
||||
private final DBTraceObjectManager manager;
|
||||
private final Thread worker;
|
||||
private volatile long mark = 0;
|
||||
private final AsyncReference<Boolean, Void> busy = new AsyncReference<>(false);
|
||||
private volatile boolean flushing = false;
|
||||
|
||||
private final Map<DBTraceObject, Map<String, NavigableMap<Long, DBTraceObjectValueBehind>>> cachedValues =
|
||||
new HashMap<>();
|
||||
|
||||
public DBTraceObjectValueWriteBehindCache(DBTraceObjectManager manager) {
|
||||
this.manager = manager;
|
||||
|
||||
worker = new Thread(this::workLoop, "WriteBehind for " + manager.trace.getName());
|
||||
worker.start();
|
||||
}
|
||||
|
||||
private void workLoop() {
|
||||
while (!manager.trace.isClosed()) {
|
||||
try {
|
||||
synchronized (cachedValues) {
|
||||
if (cachedValues.isEmpty()) {
|
||||
busy.set(false, null);
|
||||
flushing = false;
|
||||
cachedValues.wait();
|
||||
}
|
||||
while (!flushing) {
|
||||
long left = mark - System.currentTimeMillis();
|
||||
if (left <= 0) {
|
||||
break;
|
||||
}
|
||||
Msg.trace(this,
|
||||
"Waiting %d ms. Cache is %d big".formatted(left, cachedValues.size()));
|
||||
cachedValues.wait(left);
|
||||
}
|
||||
}
|
||||
if (manager.trace.isClosed()) {
|
||||
break;
|
||||
}
|
||||
writeBatch();
|
||||
if (!flushing && !manager.trace.isClosing()) {
|
||||
Thread.sleep(100);
|
||||
}
|
||||
}
|
||||
catch (InterruptedException e) {
|
||||
}
|
||||
}
|
||||
busy.set(false, null);
|
||||
flushing = false;
|
||||
}
|
||||
|
||||
private List<DBTraceObjectValueBehind> getBatch() {
|
||||
synchronized (cachedValues) {
|
||||
return doStreamAllValues()
|
||||
.limit(BATCH_SIZE)
|
||||
.toList();
|
||||
}
|
||||
}
|
||||
|
||||
private Stream<DBTraceObjectValueBehind> doStreamAllValues() {
|
||||
return cachedValues.values()
|
||||
.stream()
|
||||
.flatMap(v -> v.values().stream())
|
||||
.flatMap(v -> v.values().stream());
|
||||
}
|
||||
|
||||
private void doAdd(DBTraceObjectValueBehind behind) {
|
||||
var keys = cachedValues.computeIfAbsent(behind.getParent(), k -> new HashMap<>());
|
||||
var values = keys.computeIfAbsent(behind.getEntryKey(), k -> new TreeMap<>());
|
||||
values.put(behind.getLifespan().min(), behind);
|
||||
}
|
||||
|
||||
NavigableMap<Long, DBTraceObjectValueBehind> doRemoveNoCleanup(
|
||||
DBTraceObjectValueBehind behind) {
|
||||
var keys = cachedValues.get(behind.getParent());
|
||||
var values = keys.get(behind.getEntryKey());
|
||||
values.remove(behind.getLifespan().min());
|
||||
return values;
|
||||
}
|
||||
|
||||
void doAddDirect(NavigableMap<Long, DBTraceObjectValueBehind> values,
|
||||
DBTraceObjectValueBehind b) {
|
||||
values.put(b.getLifespan().min(), b);
|
||||
}
|
||||
|
||||
private void doRemove(DBTraceObjectValueBehind behind) {
|
||||
var keys = cachedValues.get(behind.getParent());
|
||||
var values = keys.get(behind.getEntryKey());
|
||||
values.remove(behind.getLifespan().min());
|
||||
if (values.isEmpty()) {
|
||||
keys.remove(behind.getEntryKey());
|
||||
if (keys.isEmpty()) {
|
||||
cachedValues.remove(behind.getParent());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void writeBatch() {
|
||||
try (Transaction tx = manager.trace.openTransaction("Write Behind")) {
|
||||
try (LockHold hold = LockHold.lock(manager.lock.writeLock())) {
|
||||
for (DBTraceObjectValueBehind behind : getBatch()) {
|
||||
synchronized (cachedValues) {
|
||||
doRemove(behind);
|
||||
}
|
||||
DBTraceObjectValueData value = manager.doCreateValueData(behind.getLifespan(),
|
||||
behind.getParent(), behind.getEntryKey(), behind.getValue());
|
||||
behind.getWrapper().setWrapped(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
manager.trace.clearUndo();
|
||||
Msg.trace(this, "Wrote a batch. %d parents remain.".formatted(cachedValues.size()));
|
||||
}
|
||||
|
||||
public DBTraceObjectValueBehind doCreateValue(Lifespan lifespan, DBTraceObject parent,
|
||||
String key, Object value) {
|
||||
if (manager.trace.isClosing()) {
|
||||
throw new IllegalStateException("Trace is closing");
|
||||
}
|
||||
DBTraceObjectValueBehind entry =
|
||||
new DBTraceObjectValueBehind(manager, parent, key, lifespan,
|
||||
manager.validateValue(value));
|
||||
synchronized (cachedValues) {
|
||||
doAdd(entry);
|
||||
mark = System.currentTimeMillis() + DELAY_MS;
|
||||
busy.set(true, null);
|
||||
cachedValues.notify();
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
|
||||
public void remove(DBTraceObjectValueBehind value) {
|
||||
synchronized (cachedValues) {
|
||||
doRemove(value);
|
||||
}
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
synchronized (cachedValues) {
|
||||
cachedValues.clear();
|
||||
}
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValueBehind> streamAllValues() {
|
||||
return doStreamAllValues();
|
||||
}
|
||||
|
||||
public DBTraceObjectValueBehind get(DBTraceObject parent, String key, long snap) {
|
||||
var keys = cachedValues.get(parent);
|
||||
if (keys == null) {
|
||||
return null;
|
||||
}
|
||||
var values = keys.get(key);
|
||||
if (values == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
var floor = values.floorEntry(snap);
|
||||
if (floor == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!floor.getValue().getLifespan().contains(snap)) {
|
||||
return null;
|
||||
}
|
||||
return floor.getValue();
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValueBehind> streamParents(DBTraceObject child, Lifespan lifespan) {
|
||||
// TODO: Optimize/index this?
|
||||
return streamAllValues()
|
||||
.filter(v -> v.getValue() == child && v.getLifespan().intersects(lifespan));
|
||||
}
|
||||
|
||||
private Stream<DBTraceObjectValueBehind> streamSub(
|
||||
NavigableMap<Long, DBTraceObjectValueBehind> map, Lifespan span, boolean forward) {
|
||||
Long floor = map.floorKey(span.min());
|
||||
if (floor == null) {
|
||||
floor = span.min();
|
||||
}
|
||||
var sub = map.subMap(floor, true, span.max(), true);
|
||||
if (!forward) {
|
||||
sub = sub.descendingMap();
|
||||
}
|
||||
return sub.values().stream();
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValueBehind> streamCanonicalParents(DBTraceObject child,
|
||||
Lifespan lifespan) {
|
||||
TraceObjectKeyPath path = child.getCanonicalPath();
|
||||
if (path.isRoot()) {
|
||||
return Stream.of();
|
||||
}
|
||||
String entryKey = path.key();
|
||||
// TODO: Better indexing?
|
||||
return cachedValues.values()
|
||||
.stream()
|
||||
.flatMap(v -> v.entrySet()
|
||||
.stream()
|
||||
.filter(e -> entryKey.equals(e.getKey()))
|
||||
.map(e -> e.getValue()))
|
||||
.flatMap(v -> streamSub(v, lifespan, true));
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValueBehind> streamValues(DBTraceObject parent, Lifespan lifespan) {
|
||||
// TODO: Better indexing?
|
||||
var keys = cachedValues.get(parent);
|
||||
if (keys == null) {
|
||||
return Stream.of();
|
||||
}
|
||||
return keys.values().stream().flatMap(v -> streamSub(v, lifespan, true));
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValueBehind> streamValues(DBTraceObject parent, String key,
|
||||
Lifespan lifespan, boolean forward) {
|
||||
var keys = cachedValues.get(parent);
|
||||
if (keys == null) {
|
||||
return Stream.of();
|
||||
}
|
||||
var values = keys.get(key);
|
||||
if (values == null) {
|
||||
return Stream.of();
|
||||
}
|
||||
return streamSub(values, lifespan, forward);
|
||||
}
|
||||
|
||||
static boolean intersectsRange(Object value, AddressRange range) {
|
||||
return (value instanceof Address av && range.contains(av)) ||
|
||||
(value instanceof AddressRange rv && range.intersects(rv));
|
||||
}
|
||||
|
||||
private Stream<DBTraceObjectValueBehind> streamValuesIntersectingLifespan(Lifespan lifespan,
|
||||
String entryKey) {
|
||||
// TODO: In-memory spatial index?
|
||||
var top = cachedValues.values().stream();
|
||||
var keys = entryKey == null
|
||||
? top.flatMap(v -> v.values().stream())
|
||||
: top.flatMap(v -> v.entrySet()
|
||||
.stream()
|
||||
.filter(e -> entryKey.equals(e.getKey()))
|
||||
.map(e -> e.getValue()));
|
||||
return keys.flatMap(v -> streamSub(v, lifespan, true));
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValueBehind> streamValuesIntersecting(Lifespan lifespan,
|
||||
AddressRange range, String entryKey) {
|
||||
return streamValuesIntersectingLifespan(lifespan, entryKey)
|
||||
.filter(v -> intersectsRange(v.getValue(), range));
|
||||
}
|
||||
|
||||
static boolean atAddress(Object value, Address address) {
|
||||
return (value instanceof Address av && address.equals(av)) ||
|
||||
(value instanceof AddressRange rv && rv.contains(address));
|
||||
}
|
||||
|
||||
public Stream<DBTraceObjectValueBehind> streamValuesAt(long snap, Address address,
|
||||
String entryKey) {
|
||||
return streamValuesIntersectingLifespan(Lifespan.at(snap), entryKey)
|
||||
.filter(v -> atAddress(v.getValue(), address));
|
||||
}
|
||||
|
||||
static AddressRange getIfRangeOrAddress(Object v) {
|
||||
if (v instanceof AddressRange rv) {
|
||||
return rv;
|
||||
}
|
||||
if (v instanceof Address av) {
|
||||
return new AddressRangeImpl(av, av);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public <I extends TraceObjectInterface> AddressSetView getObjectsAddresSet(long snap,
|
||||
String key, Class<I> ifaceCls, Predicate<? super I> predicate) {
|
||||
return new AbstractAddressSetView() {
|
||||
AddressSet collectRanges() {
|
||||
AddressSet result = new AddressSet();
|
||||
for (DBTraceObjectValueBehind v : StreamUtils
|
||||
.iter(streamValuesIntersectingLifespan(Lifespan.at(snap), key))) {
|
||||
AddressRange range = getIfRangeOrAddress(v.getValue());
|
||||
if (range == null) {
|
||||
continue;
|
||||
}
|
||||
if (!DBTraceObjectManager.acceptValue(v.getWrapper(), key, ifaceCls,
|
||||
predicate)) {
|
||||
continue;
|
||||
}
|
||||
result.add(range);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Address addr) {
|
||||
for (DBTraceObjectValueBehind v : StreamUtils
|
||||
.iter(streamValuesIntersectingLifespan(Lifespan.at(snap), key))) {
|
||||
if (!addr.equals(v.getValue())) {
|
||||
continue;
|
||||
}
|
||||
if (!DBTraceObjectManager.acceptValue(v.getWrapper(), key, ifaceCls,
|
||||
predicate)) {
|
||||
continue;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AddressRangeIterator getAddressRanges() {
|
||||
return collectRanges().getAddressRanges();
|
||||
}
|
||||
|
||||
@Override
|
||||
public AddressRangeIterator getAddressRanges(boolean forward) {
|
||||
return collectRanges().getAddressRanges(forward);
|
||||
}
|
||||
|
||||
@Override
|
||||
public AddressRangeIterator getAddressRanges(Address start, boolean forward) {
|
||||
// TODO: Could cull during collection
|
||||
return collectRanges().getAddressRanges(start, forward);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public void flush() {
|
||||
flushing = true;
|
||||
worker.interrupt();
|
||||
try {
|
||||
busy.waitValue(false).get();
|
||||
}
|
||||
catch (InterruptedException | ExecutionException e) {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
}
|
||||
|
||||
public void waitWorkers() {
|
||||
worker.interrupt();
|
||||
try {
|
||||
worker.join(10000);
|
||||
}
|
||||
catch (InterruptedException e) {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
}
|
||||
}
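The cache above is where the write speed-up comes from: new values land in nested in-memory maps (parent, then entry key, then starting snap), and a background worker commits them to the database in batches of BATCH_SIZE once DELAY_MS has elapsed since the last write, or immediately when flush() is requested; each committed entry's wrapper is then re-pointed at its database record via setWrapped. A stripped-down, generic sketch of the same write-behind pattern, with hypothetical names and none of the per-key indexing, read/write locking, or trace transactions of the real class:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.function.Consumer;

public class WriteBehindSketch<T> {
	private final Queue<T> pending = new ArrayDeque<>();
	private final Consumer<List<T>> sink; // e.g., "write these records in one transaction"
	private final int batchSize;
	private final long delayMs;
	private final Thread worker;
	private volatile boolean closed;

	public WriteBehindSketch(Consumer<List<T>> sink, int batchSize, long delayMs) {
		this.sink = sink;
		this.batchSize = batchSize;
		this.delayMs = delayMs;
		this.worker = new Thread(this::workLoop, "write-behind-sketch");
		this.worker.start();
	}

	private void workLoop() {
		while (true) {
			List<T> batch = new ArrayList<>();
			synchronized (pending) {
				while (pending.isEmpty()) {
					if (closed) {
						return; // nothing left to write
					}
					try {
						pending.wait(delayMs); // lets bursts of writes coalesce
					}
					catch (InterruptedException e) {
						// interrupt means "wake up and re-check", as in the real cache
					}
				}
				while (batch.size() < batchSize && !pending.isEmpty()) {
					batch.add(pending.poll());
				}
			}
			sink.accept(batch); // outside the monitor, so writers are never blocked on I/O
		}
	}

	public void add(T record) {
		synchronized (pending) {
			pending.add(record);
			pending.notify();
		}
	}

	public void flush() throws InterruptedException {
		closed = true; // this sketch only flushes on shutdown; the real cache keeps running
		worker.interrupt();
		worker.join();
	}
}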
|
@ -1,198 +0,0 @@
|
||||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.trace.database.target;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import org.apache.commons.collections4.IterableUtils;
|
||||
|
||||
import ghidra.trace.database.DBTraceUtils.LifespanMapSetter;
|
||||
import ghidra.trace.model.Lifespan;
|
||||
import ghidra.trace.model.target.TraceObject.ConflictResolution;
|
||||
import ghidra.trace.model.target.TraceObjectValue;
|
||||
import ghidra.trace.util.TraceChangeRecord;
|
||||
import ghidra.trace.util.TraceEvents;
|
||||
import ghidra.util.LockHold;
|
||||
|
||||
interface InternalTraceObjectValue extends TraceObjectValue {
|
||||
abstract class ValueLifespanSetter
|
||||
extends LifespanMapSetter<InternalTraceObjectValue, Object> {
|
||||
protected final Lifespan range;
|
||||
protected final Object value;
|
||||
protected InternalTraceObjectValue keep = null;
|
||||
protected Collection<InternalTraceObjectValue> kept = new ArrayList<>(2);
|
||||
|
||||
public ValueLifespanSetter(Lifespan range, Object value) {
|
||||
this.range = range;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public ValueLifespanSetter(Lifespan range, Object value,
|
||||
InternalTraceObjectValue keep) {
|
||||
this(range, value);
|
||||
this.keep = keep;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Lifespan getRange(InternalTraceObjectValue entry) {
|
||||
return entry.getLifespan();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Object getValue(InternalTraceObjectValue entry) {
|
||||
return entry.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void remove(InternalTraceObjectValue entry) {
|
||||
if (Objects.equals(entry.getValue(), value)) {
|
||||
if (keep == null) {
|
||||
keep = entry;
|
||||
}
|
||||
else {
|
||||
entry.doDeleteAndEmit();
|
||||
}
|
||||
}
|
||||
else {
|
||||
InternalTraceObjectValue created = entry.doTruncateOrDelete(range);
|
||||
if (!entry.isDeleted()) {
|
||||
kept.add(entry);
|
||||
}
|
||||
if (created != null) {
|
||||
kept.add(created);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected InternalTraceObjectValue put(Lifespan range, Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
if (keep != null && Objects.equals(this.value, value)) {
|
||||
keep.doSetLifespanAndEmit(range);
|
||||
return keep;
|
||||
}
|
||||
for (InternalTraceObjectValue k : kept) {
|
||||
if (Objects.equals(value, k.getValue()) && Objects.equals(range, k.getLifespan())) {
|
||||
kept.remove(k);
|
||||
return k;
|
||||
}
|
||||
}
|
||||
return create(range, value);
|
||||
}
|
||||
|
||||
protected abstract InternalTraceObjectValue create(Lifespan range, Object value);
|
||||
}
|
||||
|
||||
void doSetPrimitive(Object primitive);
|
||||
|
||||
DBTraceObjectManager getManager();
|
||||
|
||||
/**
|
||||
* Get the database key
|
||||
*
|
||||
* @return the key
|
||||
*/
|
||||
long getKey();
|
||||
|
||||
@Override
|
||||
DBTraceObject getChild();
|
||||
|
||||
DBTraceObject getChildOrNull();
|
||||
|
||||
void doSetLifespan(Lifespan lifespan);
|
||||
|
||||
default void doSetLifespanAndEmit(Lifespan lifespan) {
|
||||
Lifespan oldLifespan = getLifespan();
|
||||
doSetLifespan(lifespan);
|
||||
getParent().emitEvents(new TraceChangeRecord<>(TraceEvents.VALUE_LIFESPAN_CHANGED, null,
|
||||
this, oldLifespan, lifespan));
|
||||
}
|
||||
|
||||
@Override
|
||||
default void setLifespan(Lifespan lifespan) {
|
||||
setLifespan(lifespan, ConflictResolution.TRUNCATE);
|
||||
}
|
||||
|
||||
@Override
|
||||
default void setLifespan(Lifespan lifespan, ConflictResolution resolution) {
|
||||
try (LockHold hold = getTrace().lockWrite()) {
|
||||
if (getParent() == null) {
|
||||
throw new IllegalArgumentException("Cannot set lifespan of root value");
|
||||
}
|
||||
if (resolution == ConflictResolution.DENY) {
|
||||
getParent().doCheckConflicts(lifespan, getEntryKey(), getValue());
|
||||
}
|
||||
else if (resolution == ConflictResolution.ADJUST) {
|
||||
lifespan = getParent().doAdjust(lifespan, getEntryKey(), getValue());
|
||||
}
|
||||
new ValueLifespanSetter(lifespan, getValue(), this) {
|
||||
@Override
|
||||
protected Iterable<InternalTraceObjectValue> getIntersecting(Long lower,
|
||||
Long upper) {
|
||||
Collection<InternalTraceObjectValue> col = Collections.unmodifiableCollection(
|
||||
getParent().doGetValues(Lifespan.span(lower, upper), getEntryKey(), true));
|
||||
return IterableUtils.filteredIterable(col, v -> v != keep);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected InternalTraceObjectValue create(Lifespan range, Object value) {
|
||||
return getParent().doCreateValue(range, getEntryKey(), value);
|
||||
}
|
||||
}.set(lifespan, getValue());
|
||||
if (isObject()) {
|
||||
DBTraceObject child = getChild();
|
||||
child.emitEvents(
|
||||
new TraceChangeRecord<>(TraceEvents.OBJECT_LIFE_CHANGED, null, child));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void doDelete();
|
||||
|
||||
default void doDeleteAndEmit() {
|
||||
DBTraceObject parent = getParent();
|
||||
doDelete();
|
||||
parent.emitEvents(new TraceChangeRecord<>(TraceEvents.VALUE_DELETED, null, this));
|
||||
}
|
||||
|
||||
@Override
|
||||
DBTraceObject getParent();
|
||||
|
||||
default InternalTraceObjectValue doTruncateOrDeleteAndEmitLifeChange(Lifespan span) {
|
||||
if (!isCanonical()) {
|
||||
return doTruncateOrDelete(span);
|
||||
}
|
||||
DBTraceObject child = getChildOrNull();
|
||||
InternalTraceObjectValue result = doTruncateOrDelete(span);
|
||||
child.emitEvents(new TraceChangeRecord<>(TraceEvents.OBJECT_LIFE_CHANGED, null, child));
|
||||
return result;
|
||||
}
|
||||
|
||||
default InternalTraceObjectValue doTruncateOrDelete(Lifespan span) {
|
||||
List<Lifespan> removed = getLifespan().subtract(span);
|
||||
if (removed.isEmpty()) {
|
||||
doDeleteAndEmit();
|
||||
return null;
|
||||
}
|
||||
doSetLifespanAndEmit(removed.get(0));
|
||||
if (removed.size() == 2) {
|
||||
return getParent().doCreateValue(removed.get(1), getEntryKey(), getValue());
|
||||
}
|
||||
return this;
|
||||
}
|
||||
}
|
@ -0,0 +1,48 @@
|
||||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.trace.database.target;
|
||||
|
||||
import ghidra.trace.model.Lifespan;
|
||||
|
||||
interface TraceObjectValueStorage {
|
||||
DBTraceObjectManager getManager();
|
||||
|
||||
DBTraceObjectValue getWrapper();
|
||||
|
||||
DBTraceObject getParent();
|
||||
|
||||
String getEntryKey();
|
||||
|
||||
/**
|
||||
* Just set the lifespan, no notifications
|
||||
*
|
||||
* <p>
|
||||
* The wrapper will notify the parent and child, if necessary.
|
||||
*
|
||||
* @param lifespan the new lifespan
|
||||
*/
|
||||
void doSetLifespan(Lifespan lifespan);
|
||||
|
||||
Lifespan getLifespan();
|
||||
|
||||
DBTraceObject getChildOrNull();
|
||||
|
||||
Object getValue();
|
||||
|
||||
boolean isDeleted();
|
||||
|
||||
void doDelete();
|
||||
}
|
@ -77,10 +77,9 @@ public class DBTraceThreadManager implements TraceThreadManager, DBTraceManager
		if (objectManager.hasSchema()) {
			return objectManager.assertMyThread(thread);
		}
		if (!(thread instanceof DBTraceThread)) {
		if (!(thread instanceof DBTraceThread dbThread)) {
			throw new IllegalArgumentException("Thread " + thread + " is not part of this trace");
		}
		DBTraceThread dbThread = (DBTraceThread) thread;
		if (dbThread.manager != this) {
			throw new IllegalArgumentException("Thread " + thread + " is not part of this trace");
		}
@ -123,16 +123,23 @@ public interface TraceObjectManager {
|
||||
/**
|
||||
* Get all the objects in the database
|
||||
*
|
||||
* @return the collection of all objects
|
||||
* @return the stream of all objects
|
||||
*/
|
||||
Collection<? extends TraceObject> getAllObjects();
|
||||
Stream<? extends TraceObject> getAllObjects();
|
||||
|
||||
/**
|
||||
* Get the number of objects in the database
|
||||
*
|
||||
* @return the number of objects
|
||||
*/
|
||||
int getObjectCount();
|
||||
|
||||
/**
|
||||
* Get all the values (edges) in the database
|
||||
*
|
||||
* @return the collect of all values
|
||||
* @return the stream of all values
|
||||
*/
|
||||
Collection<? extends TraceObjectValue> getAllValues();
|
||||
Stream<? extends TraceObjectValue> getAllValues();
|
||||
|
||||
/**
|
||||
* Get all address-ranged values intersecting the given span and address range
|
||||
|
@ -729,6 +729,7 @@ public class ToyDBTraceBuilder implements AutoCloseable {
	public File save() throws IOException, CancelledException {
		Path tmp = Files.createTempFile("test", ".db");
		Files.delete(tmp); // saveAs must create the file
		trace.objectManager.flushWbCaches();
		trace.getDBHandle().saveAs(tmp.toFile(), false, new ConsoleTaskMonitor());
		return tmp.toFile();
	}
@ -327,42 +327,42 @@ public class DBTraceObjectManagerTest extends AbstractGhidraHeadlessIntegrationT
|
||||
@Test
|
||||
public void testClear() {
|
||||
populateModel(3);
|
||||
assertEquals(5, manager.getAllObjects().size());
|
||||
assertEquals(5, manager.getObjectCount());
|
||||
|
||||
try (Transaction tx = b.startTransaction()) {
|
||||
manager.clear();
|
||||
}
|
||||
assertEquals(0, manager.getAllObjects().size());
|
||||
assertEquals(0, manager.getObjectCount());
|
||||
|
||||
populateModel(3);
|
||||
assertEquals(5, manager.getAllObjects().size());
|
||||
assertEquals(5, manager.getObjectCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
// @Test // Write-behind cache does not implement undo or redo
|
||||
public void testUndoRedo() throws Exception {
|
||||
populateModel(3);
|
||||
assertEquals(5, manager.getAllObjects().size());
|
||||
assertEquals(5, manager.getObjectCount());
|
||||
|
||||
b.trace.undo();
|
||||
assertEquals(0, manager.getAllObjects().size());
|
||||
assertEquals(0, manager.getObjectCount());
|
||||
|
||||
b.trace.redo();
|
||||
assertEquals(5, manager.getAllObjects().size());
|
||||
assertEquals(5, manager.getObjectCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
// @Test // Write-behind cache does not implement abort
|
||||
public void testAbort() throws Exception {
|
||||
try (Transaction tx = b.startTransaction()) {
|
||||
populateModel(3);
|
||||
assertEquals(5, manager.getAllObjects().size());
|
||||
assertEquals(5, manager.getObjectCount());
|
||||
|
||||
tx.abort();
|
||||
}
|
||||
|
||||
assertEquals(0, manager.getAllObjects().size());
|
||||
assertEquals(0, manager.getObjectCount());
|
||||
|
||||
populateModel(3);
|
||||
assertEquals(5, manager.getAllObjects().size());
|
||||
assertEquals(5, manager.getObjectCount());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -15,13 +15,14 @@
 */
package ghidra.util;

import java.util.Collection;
import java.util.Comparator;
import java.util.*;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public enum StreamUtils {
	;
public class StreamUtils {
	private StreamUtils() {
	}

	@SuppressWarnings("unchecked")
	public static <T> Stream<T> merge(Collection<? extends Stream<? extends T>> streams,
			Comparator<? super T> comparator) {
@ -31,4 +32,9 @@ public enum StreamUtils {
		return StreamSupport.stream(new MergeSortingSpliterator<>(
			streams.stream().map(s -> s.spliterator()).toList(), comparator), false);
	}

	@SuppressWarnings("unchecked")
	public static <T> Iterable<T> iter(Stream<? extends T> stream) {
		return () -> (Iterator<T>) stream.iterator();
	}
}
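The two helpers above back the streaming APIs used elsewhere in this change: merge combines several streams into one and, as the MergeSortingSpliterator name suggests, assumes each input is already ordered by the given comparator; iter adapts a one-shot Stream for use in an enhanced for loop. A brief usage sketch with placeholder data (the example class and values are hypothetical):

import java.util.Comparator;
import java.util.List;
import java.util.stream.Stream;

import ghidra.util.StreamUtils;

public class StreamUtilsExample {
	public static void main(String[] args) {
		Stream<Integer> fromDb = Stream.of(1, 4, 9);     // stand-in for DB-backed results
		Stream<Integer> fromCache = Stream.of(2, 3, 10); // stand-in for write-behind results

		// Lazily merge two individually sorted streams into one sorted stream
		Stream<Integer> merged =
			StreamUtils.merge(List.of(fromDb, fromCache), Comparator.naturalOrder());

		// Adapt the stream for an enhanced for loop (single traversal only)
		for (int snap : StreamUtils.iter(merged)) {
			System.out.println(snap);
		}
	}
}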
@ -0,0 +1,123 @@
|
||||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.util.database;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.concurrent.locks.ReadWriteLock;
|
||||
|
||||
import ghidra.util.LockHold;
|
||||
|
||||
public class DBSynchronizedCollection<E> implements Collection<E> {
|
||||
private final Collection<E> delegate;
|
||||
private final ReadWriteLock lock;
|
||||
|
||||
public DBSynchronizedCollection(Collection<E> delegate, ReadWriteLock lock) {
|
||||
this.delegate = delegate;
|
||||
this.lock = lock;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return delegate.size();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return delegate.isEmpty();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object o) {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return delegate.contains(o);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<E> iterator() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return new DBSynchronizedIterator<>(delegate.iterator(), lock);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object[] toArray() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return delegate.toArray();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T[] toArray(T[] a) {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return delegate.toArray(a);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean add(E e) {
|
||||
try (LockHold hold = LockHold.lock(lock.writeLock())) {
|
||||
return delegate.add(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(Object o) {
|
||||
try (LockHold hold = LockHold.lock(lock.writeLock())) {
|
||||
return delegate.remove(o);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsAll(Collection<?> c) {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return delegate.containsAll(c);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean addAll(Collection<? extends E> c) {
|
||||
try (LockHold hold = LockHold.lock(lock.writeLock())) {
|
||||
return delegate.addAll(c);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean removeAll(Collection<?> c) {
|
||||
try (LockHold hold = LockHold.lock(lock.writeLock())) {
|
||||
return delegate.removeAll(c);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean retainAll(Collection<?> c) {
|
||||
try (LockHold hold = LockHold.lock(lock.writeLock())) {
|
||||
return delegate.retainAll(c);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
try (LockHold hold = LockHold.lock(lock.writeLock())) {
|
||||
delegate.clear();
|
||||
}
|
||||
}
|
||||
}
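Every operation of DBSynchronizedCollection acquires the matching side of the supplied ReadWriteLock before delegating, and iterator() hands back a DBSynchronizedIterator so traversal is guarded the same way. A short usage sketch, assuming an ordinary ArrayList as the delegate and a ReentrantReadWriteLock (in practice the lock comes from the owning database store); the example class name is hypothetical:

import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import ghidra.util.database.DBSynchronizedCollection;

public class SynchronizedCollectionExample {
	public static void main(String[] args) {
		ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
		Collection<String> names =
			new DBSynchronizedCollection<>(new ArrayList<>(), lock);

		names.add("Processes[0]");                          // taken under the write lock
		System.out.println(names.contains("Processes[0]")); // taken under the read lock
		for (String n : names) { // hasNext()/next() each re-acquire the read lock
			System.out.println(n);
		}
	}
}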
|
@ -0,0 +1,52 @@
|
||||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.util.database;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.concurrent.locks.ReadWriteLock;
|
||||
|
||||
import ghidra.util.LockHold;
|
||||
|
||||
public class DBSynchronizedIterator<T> implements Iterator<T> {
|
||||
private final Iterator<T> iterator;
|
||||
private final ReadWriteLock lock;
|
||||
|
||||
public DBSynchronizedIterator(Iterator<T> iterator, ReadWriteLock lock) {
|
||||
this.iterator = iterator;
|
||||
this.lock = lock;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return iterator.hasNext();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public T next() {
|
||||
try (LockHold hold = LockHold.lock(lock.readLock())) {
|
||||
return iterator.next();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
try (LockHold hold = LockHold.lock(lock.writeLock())) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
}
|
@ -18,12 +18,12 @@ package ghidra.util.database.spatial;
|
||||
import java.lang.reflect.Array;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.concurrent.locks.ReadWriteLock;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.apache.commons.collections4.IteratorUtils;

import ghidra.util.LockHold;
import ghidra.util.database.DBSynchronizedIterator;
import ghidra.util.database.spatial.DBTreeDataRecord.RecordEntry;

public abstract class AbstractConstraintsTreeSpatialMap< //
@ -98,37 +98,6 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        return (int) l;
    }

    protected static class SynchronizedIterator<T> implements Iterator<T> {
        private final Iterator<T> iterator;
        private final ReadWriteLock lock;

        public SynchronizedIterator(Iterator<T> iterator, ReadWriteLock lock) {
            this.iterator = iterator;
            this.lock = lock;
        }

        @Override
        public boolean hasNext() {
            try (LockHold hold = LockHold.lock(lock.readLock())) {
                return iterator.hasNext();
            }
        }

        @Override
        public T next() {
            try (LockHold hold = LockHold.lock(lock.readLock())) {
                return iterator.next();
            }
        }

        @Override
        public void remove() {
            try (LockHold hold = LockHold.lock(lock.writeLock())) {
                iterator.remove();
            }
        }
    }

    protected abstract static class ToArrayConsumer<A, T, U extends A> implements Consumer<T> {
        protected final A[] arr;
        protected int i = 0;
@ -172,7 +141,7 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        @Override
        public Iterator<Entry<DS, T>> iterator() {
            try (LockHold hold = LockHold.lock(tree.dataStore.readLock())) {
                return new SynchronizedIterator<>(
                return new DBSynchronizedIterator<>(
                    IteratorUtils.transformedIterator(tree.iterator(query), r -> r.asEntry()),
                    tree.dataStore.getLock());
            }
@ -220,6 +189,12 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        public boolean isEmpty() {
            return AbstractConstraintsTreeSpatialMap.this.isEmpty();
        }

        @Override
        public Spliterator<Entry<DS, T>> spliterator() {
            // Size estimate is more expensive than benefits of knowing size
            return Spliterators.spliteratorUnknownSize(iterator(), 0);
        }
    };
}
@ -229,7 +204,7 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        @Override
        public Iterator<Entry<DS, T>> iterator() {
            try (LockHold hold = LockHold.lock(tree.dataStore.readLock())) {
                return new SynchronizedIterator<>(
                return new DBSynchronizedIterator<>(
                    IteratorUtils.transformedIterator(tree.orderedIterator(query),
                        r -> r.asEntry()),
                    tree.dataStore.getLock());
@ -245,6 +220,12 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        public boolean isEmpty() {
            return AbstractConstraintsTreeSpatialMap.this.isEmpty();
        }

        @Override
        public Spliterator<Entry<DS, T>> spliterator() {
            // Size estimate is more expensive than benefits of knowing size
            return Spliterators.spliteratorUnknownSize(iterator(), 0);
        }
    };
}
@ -254,7 +235,7 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        @Override
        public Iterator<DS> iterator() {
            try (LockHold hold = LockHold.lock(tree.dataStore.readLock())) {
                return new SynchronizedIterator<>(
                return new DBSynchronizedIterator<>(
                    IteratorUtils.transformedIterator(tree.iterator(query), r -> r.getShape()),
                    tree.dataStore.getLock());
            }
@ -301,6 +282,12 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        public boolean isEmpty() {
            return AbstractConstraintsTreeSpatialMap.this.isEmpty();
        }

        @Override
        public Spliterator<DS> spliterator() {
            // Size estimate is more expensive than benefits of knowing size
            return Spliterators.spliteratorUnknownSize(iterator(), 0);
        }
    };
}
@ -310,7 +297,7 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        @Override
        public Iterator<DS> iterator() {
            try (LockHold hold = LockHold.lock(tree.dataStore.readLock())) {
                return new SynchronizedIterator<>(
                return new DBSynchronizedIterator<>(
                    IteratorUtils.transformedIterator(tree.orderedIterator(query),
                        r -> r.getShape()),
                    tree.dataStore.getLock());
@ -326,6 +313,12 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        public boolean isEmpty() {
            return AbstractConstraintsTreeSpatialMap.this.isEmpty();
        }

        @Override
        public Spliterator<DS> spliterator() {
            // Size estimate is more expensive than benefits of knowing size
            return Spliterators.spliteratorUnknownSize(iterator(), 0);
        }
    };
}
@ -335,7 +328,7 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        @Override
        public Iterator<T> iterator() {
            try (LockHold hold = LockHold.lock(tree.dataStore.readLock())) {
                return new SynchronizedIterator<>(
                return new DBSynchronizedIterator<>(
                    IteratorUtils.transformedIterator(tree.iterator(query),
                        r -> r.getRecordValue()),
                    tree.dataStore.getLock());
@ -383,6 +376,12 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        public boolean isEmpty() {
            return AbstractConstraintsTreeSpatialMap.this.isEmpty();
        }

        @Override
        public Spliterator<T> spliterator() {
            // Size estimate is more expensive than benefits of knowing size
            return Spliterators.spliteratorUnknownSize(iterator(), 0);
        }
    };
}
@ -392,7 +391,7 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        @Override
        public Iterator<T> iterator() {
            try (LockHold hold = LockHold.lock(tree.dataStore.readLock())) {
                return new SynchronizedIterator<>(
                return new DBSynchronizedIterator<>(
                    IteratorUtils.transformedIterator(tree.orderedIterator(query),
                        r -> r.getRecordValue()),
                    tree.dataStore.getLock());
@ -408,6 +407,12 @@ public abstract class AbstractConstraintsTreeSpatialMap< //
        public boolean isEmpty() {
            return AbstractConstraintsTreeSpatialMap.this.isEmpty();
        }

        @Override
        public Spliterator<T> spliterator() {
            // Size estimate is more expensive than benefits of knowing size
            return Spliterators.spliteratorUnknownSize(iterator(), 0);
        }
    };
}
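The hunks above drop the file-local SynchronizedIterator in favor of the shared ghidra.util.database.DBSynchronizedIterator. Judging only from the removed class and the new call sites, the wrapper's job is to re-acquire the store's read lock around each hasNext/next call (and the write lock for remove) so a live iterator never races the backing database. Here is a minimal standalone sketch of that pattern, using plain java.util.concurrent locks instead of Ghidra's LockHold; the class name is illustrative and not the real DBSynchronizedIterator API:

import java.util.Iterator;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.function.Supplier;

// Illustrative sketch only: delegates every call to the wrapped iterator
// while holding the appropriate half of a ReadWriteLock.
class LockedIterator<T> implements Iterator<T> {
    private final Iterator<T> delegate;
    private final ReadWriteLock rwLock;

    LockedIterator(Iterator<T> delegate, ReadWriteLock rwLock) {
        this.delegate = delegate;
        this.rwLock = rwLock;
    }

    private static <R> R locked(Lock lock, Supplier<R> action) {
        lock.lock();
        try {
            return action.get();
        }
        finally {
            lock.unlock();
        }
    }

    @Override
    public boolean hasNext() {
        return locked(rwLock.readLock(), delegate::hasNext);  // read lock per call
    }

    @Override
    public T next() {
        return locked(rwLock.readLock(), delegate::next);     // read lock per call
    }

    @Override
    public void remove() {
        locked(rwLock.writeLock(), () -> {                    // mutation needs the write lock
            delegate.remove();
            return null;
        });
    }
}

Centralizing the wrapper also means the spatial-map views above no longer repeat the same lock discipline by hand.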
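The same hunks add spliterator() overrides whose comment states the trade-off: computing an exact size would mean walking the tree, so the views report an unknown size instead. A small self-contained demo of that JDK idiom (class and variable names here are invented for the example):

import java.util.Iterator;
import java.util.List;
import java.util.Spliterators;
import java.util.stream.StreamSupport;

class UnknownSizeSpliteratorDemo {
    public static void main(String[] args) {
        Iterator<String> it = List.of("a", "b", "c").iterator();
        // Unknown size and no extra characteristics: the stream still works,
        // it just cannot pre-size anything, which is the cheap choice when
        // sizing the source is expensive.
        long count = StreamSupport
                .stream(Spliterators.spliteratorUnknownSize(it, 0), false)
                .count();
        System.out.println(count); // prints 3
    }
}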
@ -629,9 +629,9 @@ public class RStarTreeMapTest {
        private static final int MAX_CHILDREN = 5;
        private final DBCachedObjectStoreFactory storeFactory;
        private final IntRStarTree tree;
        private final SpatialMap<IntRect, String, IntRectQuery> map;
        public final SpatialMap<IntRect, String, IntRectQuery> map;

        protected MyDomainObject(Object consumer) throws IOException, VersionException {
        public MyDomainObject(Object consumer) throws IOException, VersionException {
            super(new DBHandle(), DBOpenMode.CREATE, new ConsoleTaskMonitor(), "Testing", 500, 1000,
                consumer);
            storeFactory = new DBCachedObjectStoreFactory(this);
@ -52,6 +52,17 @@ public abstract class RangeMapSetter<E, D, R, V> {
     */
    protected abstract V getValue(E entry);

    /**
     * Check if two values are equal
     *
     * @param v1 the first value
     * @param v2 the second value
     * @return true if equal, false if not
     */
    protected boolean valuesEqual(V v1, V v2) {
        return Objects.equals(v1, v2);
    }

    /**
     * Remove an entry from the map
     *
@ -190,7 +201,7 @@ public abstract class RangeMapSetter<E, D, R, V> {
            R r = getRange(entry);
            int cmpMin = compare(getLower(r), lower);
            int cmpMax = compare(getUpper(r), upper);
            boolean sameVal = Objects.equals(getValue(entry), value);
            boolean sameVal = valuesEqual(getValue(entry), value);
            if (cmpMin <= 0 && cmpMax >= 0 && sameVal) {
                return entry; // The value in this range is already set as specified
            }
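RangeMapSetter now routes its coalescing check through the new valuesEqual hook instead of calling Objects.equals directly, so a subclass can decide when two stored values count as the same. The sketch below is a standalone illustration of that hook pattern, not Ghidra's RangeMapSetter; the class names and the case-insensitive override are invented for the example:

import java.util.Objects;

// The "setter" coalesces neighboring entries only when this hook reports a match,
// so a subclass can loosen the test without touching the range arithmetic.
class CoalescingSetter<V> {
    protected boolean valuesEqual(V v1, V v2) {
        return Objects.equals(v1, v2);              // default, as in the hunk above
    }

    public boolean shouldCoalesce(V existing, V incoming) {
        return valuesEqual(existing, incoming);     // stand-in for the real set() logic
    }
}

class CaseInsensitiveSetter extends CoalescingSetter<String> {
    @Override
    protected boolean valuesEqual(String v1, String v2) {   // hypothetical override
        return v1 == null ? v2 == null : v1.equalsIgnoreCase(v2);
    }
}

class ValuesEqualDemo {
    public static void main(String[] args) {
        System.out.println(new CoalescingSetter<String>().shouldCoalesce("rw", "RW")); // false
        System.out.println(new CaseInsensitiveSetter().shouldCoalesce("rw", "RW"));    // true
    }
}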
@ -32,7 +32,7 @@ import generic.ULongSpan.*;
 * impose behaviors and properties that aren't otherwise present on the type of endpoints. For
 * example, the domain may be {@link Long}s, but using unsigned attributes. The domain also provides
 * a factory for new spans. While nominally, this only supports closed intervals, the domain can
 * define a custom endpoint type to obtain mixed intervals, as in {@link End}.
 * define a custom endpoint type to obtain mixed intervals.
 *
 * @param <N> the type of endpoints
 * @param <S> the type of spans (recursive)
@ -387,7 +387,7 @@ public interface Span<N, S extends Span<N, S>> extends Comparable<S> {
     * existing entries are truncated or deleted (or coalesced if they share the same value as the
     * new entry) so that the new entry can fit.
     *
     * @implNote It is recommended to create an interface (having only the {@link V} parameter)
     * @implNote It is recommended to create an interface (having only the {@code <V>} parameter)
     *           extending this one specific to your domain and span type, then implement it using
     *           an extension of {@link DefaultSpanMap}. See {@link ULongSpanMap} for an example.
     * @param <N> the type of endpoints
@ -835,7 +835,7 @@ public interface Span<N, S extends Span<N, S>> extends Comparable<S> {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof @SuppressWarnings("rawtypes") DefaultSpanMap that)) {
            if (!(obj instanceof DefaultSpanMap that)) {
                return false;
            }
            if (this.domain != that.domain) {
@ -995,7 +995,7 @@ public interface Span<N, S extends Span<N, S>> extends Comparable<S> {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof @SuppressWarnings("rawtypes") DefaultSpanSet that)) {
            if (!(obj instanceof DefaultSpanSet that)) {
                return false;
            }
            if (!Objects.equals(this.map, that.map)) {
@ -1081,6 +1081,11 @@ public interface Span<N, S extends Span<N, S>> extends Comparable<S> {
        }
    }

    /**
     * Provides a default {@link #toString} implementation
     *
     * @return the string
     */
    @SuppressWarnings("unchecked")
    default String doToString() {
        return domain().toString((S) this);
@ -63,6 +63,7 @@ dependencies {
    testImplementation project(path: ':PDB', configuration: 'testArtifacts')
    testImplementation project(path: ':GnuDemangler', configuration: 'testArtifacts')

    testImplementation project(path: ':ProposedUtils', configuration: 'testArtifacts')
    testImplementation project(path: ':Framework-TraceModeling', configuration: 'testArtifacts')
    testImplementation project(path: ':Framework-AsyncComm', configuration: 'testArtifacts')
    testImplementation project(path: ':Framework-Debugging', configuration: 'testArtifacts')
@ -1051,7 +1051,7 @@ public abstract class AbstractDebuggerLogicalBreakpointServiceTest<T, MR>
        waitForPass(() -> assertLogicalBreakpointForMappedBookmarkAnd1TraceBreakpoint(trace1));
    }

    @Test
    @Test // Mappings are not write-behind cached
    public void testAbortAddMapping() throws Throwable {
        createTarget1();
        Trace trace = getTrace(target1);
@ -1092,7 +1092,7 @@ public abstract class AbstractDebuggerLogicalBreakpointServiceTest<T, MR>
        });
    }

    @Test
    // @Test // Not gonna with write-behind cache
    public void testAbortAddBreakpointAndMapping() throws Throwable {
        createTarget1();
        Trace trace = getTrace(target1);
@ -1124,7 +1124,7 @@ public abstract class AbstractDebuggerLogicalBreakpointServiceTest<T, MR>
        assertTrue(one.getTraceBreakpoints().isEmpty());
    }

    @Test
    @Test // Abort is on program, not trace
    public void testAbortAddBookmarks() throws Throwable {
        createTarget1();
        Trace trace = getTrace(target1);
@ -1152,7 +1152,7 @@ public abstract class AbstractDebuggerLogicalBreakpointServiceTest<T, MR>
        assertTrue(breakpointService.getAllBreakpoints().isEmpty());
    }

    @Test
    // @Test // Not gonna with write-behind cache
    public void testUndoRedoAddBreakpointAndMapping() throws Throwable {
        createTarget1();
        Trace trace = getTrace(target1);
@ -1563,8 +1563,11 @@ public abstract class AbstractDebuggerLogicalBreakpointServiceTest<T, MR>
    /**
     * With the addition of emulated breakpoints (which are stored in the trace), this test is now
     * sane.
     *
     * <p>
     * With the addition of the write-behind cache, this test is no longer sane.
     */
    @Test
    // @Test
    public void testAbortAddBreakpointSetSleigh() throws Throwable {
        DebuggerControlService controlService =
            addPlugin(tool, DebuggerControlServicePlugin.class);
@ -0,0 +1,628 @@
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.plugin.core.debug.service.tracermi;

import static org.junit.Assert.assertEquals;

import java.io.*;
import java.net.*;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.junit.Ignore;
import org.junit.Test;

import db.*;
import generic.Unique;
import generic.test.rule.Repeated;
import ghidra.app.plugin.core.debug.gui.AbstractGhidraHeadedDebuggerIntegrationTest;
import ghidra.app.services.TraceRmiService;
import ghidra.dbg.target.schema.XmlSchemaContext;
import ghidra.debug.api.tracermi.TraceRmiAcceptor;
import ghidra.debug.api.tracermi.TraceRmiConnection;
import ghidra.framework.plugintool.PluginTool;
import ghidra.program.model.address.*;
import ghidra.rmi.trace.TraceRmi.*;
import ghidra.rmi.trace.TraceRmi.Compiler;
import ghidra.rmi.trace.TraceRmi.RootMessage.MsgCase;
import ghidra.trace.database.ToyDBTraceBuilder;
import ghidra.trace.database.target.DBTraceObject;
import ghidra.trace.model.Lifespan;
import ghidra.trace.model.Trace;
import ghidra.trace.model.target.TraceObject.ConflictResolution;
import ghidra.trace.model.target.TraceObjectKeyPath;
import ghidra.util.LockHold;
import ghidra.util.Msg;
import ghidra.util.database.*;
import ghidra.util.database.annot.*;
import ghidra.util.database.spatial.RStarTreeMapTest.IntRect;
import ghidra.util.database.spatial.RStarTreeMapTest.MyDomainObject;
import ghidra.util.task.TaskMonitor;
@Ignore // Only want for manual testing
public class TraceRmiPerformanceTest extends AbstractGhidraHeadedDebuggerIntegrationTest {
    public static final int REGION_COUNT = 1000;
    public static final int RECORD_COUNT = 1000 * 6;

    interface Tx extends AutoCloseable {
    }

    interface TraceHandle extends AutoCloseable {
        Trace getTrace() throws Throwable;

        Tx startTx() throws Throwable;

        default void addLotsOfRegions() throws Throwable {
            try (Tx tx = startTx()) {
                AddressFactory af = getTrace().getBaseAddressFactory();
                AddressSpace space = af.getDefaultAddressSpace();
                for (int i = 0; i < REGION_COUNT; i++) {
                    AddressRange range = new AddressRangeImpl(space.getAddress(i * 1024), 1024);
                    addRegion(Integer.toString(i), range, "rw");
                }
            }
        }

        void addRegion(String key, AddressRange range, String perms) throws Throwable;
    }

    interface TraceMaker {
        TraceHandle createTrace() throws Throwable;
    }

    static class ApiTx implements Tx {
        private final Transaction tx;

        private ApiTx(Transaction tx) {
            this.tx = tx;
        }

        @Override
        public void close() throws Exception {
            tx.close();
        }
    }

    static class ApiTraceHandle implements TraceHandle {
        private final ToyDBTraceBuilder tb;

        public ApiTraceHandle(ToyDBTraceBuilder tb) {
            this.tb = tb;
        }

        @Override
        public Trace getTrace() throws Throwable {
            return tb.trace;
        }

        @Override
        public Tx startTx() throws Throwable {
            return new ApiTx(tb.trace.openTransaction("Test"));
        }

        @Override
        public void addRegion(String key, AddressRange range, String perms) {
            DBTraceObject region = tb.trace.getObjectManager()
                    .createObject(TraceObjectKeyPath.parse("Processes[0].Memory[" + key + "]"));
            region.setValue(Lifespan.nowOn(0), "Range", range);
            region.setValue(Lifespan.nowOn(0), "R", perms.contains("r"));
            region.setValue(Lifespan.nowOn(0), "W", perms.contains("w"));
            region.setValue(Lifespan.nowOn(0), "X", perms.contains("x"));
            region.insert(Lifespan.nowOn(0), ConflictResolution.ADJUST);
        }

        @Override
        public void close() throws Exception {
            tb.close();
        }
    }

    class ApiTraceMaker implements TraceMaker {
        @Override
        public TraceHandle createTrace() throws Throwable {
            tb = new ToyDBTraceBuilder(name.getMethodName(), LANGID_TOYBE64);
            try (Transaction tx = tb.startTransaction()) {
                tb.trace.getObjectManager().createRootObject(SCHEMA_SESSION);
            }
            return new ApiTraceHandle(tb);
        }
    }
    static class RmiTraceHandle implements TraceHandle {
        private static final int RESULT_COUNT = REGION_COUNT * 6 + 2;
        private final TraceRmiConnection connection;
        private final Socket client;

        private final List<CompletableFuture<?>> results = new ArrayList<>(RESULT_COUNT * 5);
        private final OutputStream out;
        private final InputStream in;
        private final ExecutorService receiver = Executors.newSingleThreadExecutor();

        private int seq = 1;
        private int txid = 1000;

        public RmiTraceHandle(TraceRmiConnection connection, Socket client) throws IOException {
            this.connection = connection;
            this.client = client;
            this.out = client.getOutputStream();
            this.in = client.getInputStream();
        }

        private CompletableFuture<RootMessage> receive() {
            return CompletableFuture.supplyAsync(() -> {
                try {
                    return TraceRmiHandler.recvDelimited(in);
                }
                catch (IOException e) {
                    return ExceptionUtils.rethrow(e);
                }
            }, receiver);
        }

        private CompletableFuture<RootMessage> request(RootMessage msg) throws IOException {
            TraceRmiHandler.sendDelimited(out, msg, seq++);
            return receive();
        }

        private static void assertMsgCase(MsgCase expected, RootMessage msg) {
            if (msg.getMsgCase() == MsgCase.ERROR) {
                throw new AssertionError("Got RMI error: " + msg.getError().getMessage());
            }
            assertEquals(expected, msg.getMsgCase());
        }

        private CompletableFuture<Void> finishNegotiate() {
            return receive().thenAccept(reply -> {
                assertMsgCase(MsgCase.REPLY_NEGOTIATE, reply);
            });
        }

        class RmiTx implements Tx {
            private final TxId txid;

            public RmiTx(TxId txid) {
                this.txid = txid;
            }

            @Override
            public void close() throws Exception {
                results.add(doEndTx(txid));

                assertEquals(RESULT_COUNT, results.size());
                for (CompletableFuture<?> r : results) {
                    r.get(1000, TimeUnit.MILLISECONDS);
                }
            }
        }

        protected TxId nextTx() {
            return TxId.newBuilder().setId(txid).build();
        }

        protected CompletableFuture<Void> doStartTx(TxId txid) throws IOException {
            RootMessage msg = RootMessage.newBuilder()
                    .setRequestStartTx(RequestStartTx.newBuilder()
                            .setDescription("Test")
                            .setOid(DomObjId.newBuilder().setId(0))
                            .setTxid(txid))
                    .build();
            return request(msg).thenAccept(reply -> {
                assertMsgCase(MsgCase.REPLY_START_TX, reply);
            });
        }

        protected CompletableFuture<Void> doEndTx(TxId txid) throws IOException {
            RootMessage msg = RootMessage.newBuilder()
                    .setRequestEndTx(RequestEndTx.newBuilder()
                            .setAbort(false)
                            .setOid(DomObjId.newBuilder().setId(0))
                            .setTxid(txid))
                    .build();
            return request(msg).thenAccept(reply -> {
                assertMsgCase(MsgCase.REPLY_END_TX, reply);
            });
        }

        @Override
        public Tx startTx() throws Throwable {
            TxId txid = nextTx();
            results.add(doStartTx(txid));
            return new RmiTx(txid);
        }

        private CompletableFuture<Void> createTrace(String name) throws IOException {
            RootMessage msg = RootMessage.newBuilder()
                    .setRequestCreateTrace(RequestCreateTrace.newBuilder()
                            .setPath(FilePath.newBuilder().setPath("Test/" + name))
                            .setLanguage(Language.newBuilder().setId(LANGID_TOYBE64))
                            .setCompiler(Compiler.newBuilder().setId("default"))
                            .setOid(DomObjId.newBuilder().setId(0)))
                    .build();
            return request(msg).thenAccept(reply -> {
                assertMsgCase(MsgCase.REPLY_CREATE_TRACE, reply);
            });
        }

        private CompletableFuture<Void> createRootObject() throws IOException {
            RootMessage msg = RootMessage.newBuilder()
                    .setRequestCreateRootObject(RequestCreateRootObject.newBuilder()
                            .setOid(DomObjId.newBuilder().setId(0))
                            .setSchemaContext(XmlSchemaContext.serialize(SCHEMA_CTX))
                            .setRootSchema(SCHEMA_SESSION.getName().toString()))
                    .build();
            return request(msg).thenAccept(reply -> {
                assertMsgCase(MsgCase.REPLY_CREATE_OBJECT, reply);
            });
        }

        private CompletableFuture<Void> requestSetValue(DomObjId oid, ObjPath path,
                Lifespan lifespan, String key, Object value) throws IOException {
            RootMessage msg = RootMessage.newBuilder()
                    .setRequestSetValue(RequestSetValue.newBuilder()
                            .setOid(oid)
                            .setValue(ValSpec.newBuilder()
                                    .setParent(ObjSpec.newBuilder()
                                            .setPath(path))
                                    .setKey(key)
                                    .setSpan(TraceRmiHandler.makeSpan(lifespan))
                                    .setValue(TraceRmiHandler.makeValue(value)))
                            .setResolution(Resolution.CR_ADJUST))
                    .build();
            return request(msg).thenAccept(reply -> {
                assertMsgCase(MsgCase.REPLY_SET_VALUE, reply);
            });
        }

        @Override
        public void addRegion(String key, AddressRange range, String perms) throws IOException {
            DomObjId oid0 = DomObjId.newBuilder().setId(0).build();
            ObjPath path = ObjPath.newBuilder().setPath("Processes[0].Memory[" + key + "]").build();
            RootMessage msgCreate = RootMessage.newBuilder()
                    .setRequestCreateObject(RequestCreateObject.newBuilder()
                            .setOid(oid0)
                            .setPath(path))
                    .build();
            results.add(request(msgCreate).thenAccept(reply -> {
                assertMsgCase(MsgCase.REPLY_CREATE_OBJECT, reply);
            }));

            results.add(requestSetValue(oid0, path, Lifespan.nowOn(0), "Range", range));
            results.add(requestSetValue(oid0, path, Lifespan.nowOn(0), "R", perms.contains("r")));
            results.add(requestSetValue(oid0, path, Lifespan.nowOn(0), "W", perms.contains("w")));
            results.add(requestSetValue(oid0, path, Lifespan.nowOn(0), "X", perms.contains("x")));

            RootMessage msgInsert = RootMessage.newBuilder()
                    .setRequestInsertObject(RequestInsertObject.newBuilder()
                            .setOid(oid0)
                            .setObject(ObjSpec.newBuilder().setPath(path))
                            .setSpan(TraceRmiHandler.makeSpan(Lifespan.nowOn(0)))
                            .setResolution(Resolution.CR_ADJUST))
                    .build();
            results.add(request(msgInsert).thenAccept(reply -> {
                assertMsgCase(MsgCase.REPLY_INSERT_OBJECT, reply);
            }));
        }

        @Override
        public Trace getTrace() throws Throwable {
            return Unique.assertOne(connection.getTargets()).getTrace();
        }

        @Override
        public void close() throws Exception {
            client.close();
            connection.close();
        }
    }
    class RmiTraceMaker implements TraceMaker {
        protected TraceRmiService getService() throws Throwable {
            TraceRmiService service = tool.getService(TraceRmiService.class);
            if (service != null) {
                return service;
            }
            return addPlugin(tool, TraceRmiPlugin.class);
        }

        private void startNegotiate(OutputStream out) throws IOException {
            RootMessage msg = RootMessage.newBuilder()
                    .setRequestNegotiate(RequestNegotiate.newBuilder()
                            .setDescription("Test")
                            .setVersion(TraceRmiHandler.VERSION)
                            .addAllMethods(List.of()))
                    .build();
            TraceRmiHandler.sendDelimited(out, msg, 0);
        }

        @Override
        public TraceHandle createTrace() throws Throwable {
            TraceRmiService service = getService();
            TraceRmiAcceptor acceptor = service.acceptOne(new InetSocketAddress("localhost", 0));
            Socket client = new Socket();

            client.connect(acceptor.getAddress());
            startNegotiate(client.getOutputStream());
            TraceRmiConnection connection = acceptor.accept();
            RmiTraceHandle th = new RmiTraceHandle(connection, client);
            th.finishNegotiate();

            TxId txid = th.nextTx();
            th.createTrace(name.getMethodName()).get(10000, TimeUnit.MILLISECONDS);
            th.doStartTx(txid).get(1000, TimeUnit.MILLISECONDS);
            th.createRootObject().get(1000, TimeUnit.MILLISECONDS);
            th.doEndTx(txid).get(1000, TimeUnit.MILLISECONDS);
            tb = new ToyDBTraceBuilder(th.getTrace());
            return th;
        }
    }

    class NerfedRmiTraceMaker extends RmiTraceMaker {
        @Override
        protected TraceRmiService getService() throws Throwable {
            tool.removePlugins(tool.getManagedPlugins()
                    .stream()
                    .filter(p -> p instanceof TraceRmiPlugin)
                    .toList());
            return addPlugin(tool, NerfedTraceRmiPlugin.class);
        }
    }

    static class TimeMe implements AutoCloseable {
        private final String name;
        private final long start;

        public TimeMe(String name) {
            this.name = name;
            Msg.info(this, "Starting: " + name);
            this.start = System.currentTimeMillis();
        }

        @Override
        public void close() throws Exception {
            long elapsed = System.currentTimeMillis() - start;
            Msg.info(this, "Finished: " + name + ". Took " + elapsed + " ms");
        }
    }
    @Test
    @Repeated(20) // First time will measure warm-up
    public void testMeasureViaApiIsolated() throws Throwable {
        TraceMaker tm = new ApiTraceMaker();
        try (TraceHandle th = tm.createTrace()) {
            try (TimeMe __ = new TimeMe("Add via API. Isolated.")) {
                th.addLotsOfRegions();
            }
        }
    }

    @Test
    @Repeated(2) // First time will measure warm-up
    public void testMeasureViaRmiIsolated() throws Throwable {
        TraceMaker tm = new RmiTraceMaker();
        try (TraceHandle th = tm.createTrace()) {
            try (TimeMe __ = new TimeMe("Add via RMI. Isolated.")) {
                th.addLotsOfRegions();
            }
        }
    }

    static class NerfedTraceRmiHandler extends TraceRmiHandler {
        public NerfedTraceRmiHandler(TraceRmiPlugin plugin, Socket socket) throws IOException {
            super(plugin, socket);
        }

        @Override
        protected ReplyCreateObject handleCreateObject(RequestCreateObject req) {
            return ReplyCreateObject.getDefaultInstance();
        }

        @Override
        protected ReplySetValue handleSetValue(RequestSetValue req)
                throws AddressOverflowException {
            return ReplySetValue.getDefaultInstance();
        }

        @Override
        protected ReplyInsertObject handleInsertObject(RequestInsertObject req) {
            return ReplyInsertObject.getDefaultInstance();
        }
    }

    static class NerfedTraceRmiAcceptor extends DefaultTraceRmiAcceptor {
        public NerfedTraceRmiAcceptor(TraceRmiPlugin plugin, SocketAddress address) {
            super(plugin, address);
        }

        @Override
        public NerfedTraceRmiHandler doAccept(TraceRmiAcceptor acceptor)
                throws IOException {
            Socket client = socket.accept();
            NerfedTraceRmiHandler handler = new NerfedTraceRmiHandler(plugin, client);
            handler.start();
            plugin.listeners.invoke().connected(handler, getConnectMode(), acceptor);
            return handler;
        }
    }

    public static class NerfedTraceRmiPlugin extends TraceRmiPlugin {
        public NerfedTraceRmiPlugin(PluginTool tool) {
            super(tool);
        }

        @Override
        public DefaultTraceRmiAcceptor acceptOne(SocketAddress address) throws IOException {
            NerfedTraceRmiAcceptor acceptor = new NerfedTraceRmiAcceptor(this, address);
            acceptor.start();
            listeners.invoke().waitingAccept(acceptor);
            return acceptor;
        }
    }
    @Test
    @Repeated(2)
    public void testMeasureRmiNetworkOnly() throws Throwable {
        TraceMaker tm = new NerfedRmiTraceMaker();
        try (TraceHandle th = tm.createTrace()) {
            try (TimeMe __ = new TimeMe("Add via RMI. Isolated.")) {
                th.addLotsOfRegions();
            }
        }
    }

    @Test
    @Repeated(2)
    public void testMeasurePlainTable() throws Exception {
        DBHandle handle = new DBHandle();
        // Some comparable number of fields to a DBTraceObjectValue
        Schema schema = new SchemaBuilder()
                .field("Parent", LongField.class)
                .field("EntryKey", StringField.class)
                .field("MinSnap", LongField.class)
                .field("MaxSnap", LongField.class)
                .field("Value", BinaryField.class)
                .build();

        try (TimeMe tm = new TimeMe("Plain table")) {
            try (Transaction tx = handle.openTransaction(null)) {
                Table table = handle.createTable("Test", schema);
                for (int i = 0; i < RECORD_COUNT; i++) {
                    DBRecord rec = schema.createRecord(0);
                    rec.setLongValue(0, 0);
                    rec.setString(1, "Whatever");
                    rec.setLongValue(2, 0);
                    rec.setLongValue(3, Long.MAX_VALUE);

                    ByteBuffer rangeEnc = ByteBuffer.allocate(18);
                    rangeEnc.putShort((short) 0x204); // Made up "space id"
                    rangeEnc.putLong(1024 * i);
                    rangeEnc.putLong(1024 * i + 1023);
                    rec.setBinaryData(4, rangeEnc.array());

                    table.putRecord(rec);
                }
            }
        }
    }
    @DBAnnotatedObjectInfo(version = 0)
    public static class TestObject extends DBAnnotatedObject {

        static final String NAME_PARENT = "Parent";
        static final String NAME_ENTRY_KEY = "EntryKey";
        static final String NAME_MIN_SNAP = "MinSnap";
        static final String NAME_MAX_SNAP = "MaxSnap";
        static final String NAME_VALUE = "Value";

        @DBAnnotatedColumn(NAME_PARENT)
        static DBObjectColumn COL_PARENT;
        @DBAnnotatedColumn(NAME_ENTRY_KEY)
        static DBObjectColumn COL_ENTRY_KEY;
        @DBAnnotatedColumn(NAME_MIN_SNAP)
        static DBObjectColumn COL_MIN_SNAP;
        @DBAnnotatedColumn(NAME_MAX_SNAP)
        static DBObjectColumn COL_MAX_SNAP;
        @DBAnnotatedColumn(NAME_VALUE)
        static DBObjectColumn COL_VALUE;

        @DBAnnotatedField(column = NAME_PARENT)
        private long parent;
        @DBAnnotatedField(column = NAME_ENTRY_KEY)
        private String entryKey;
        @DBAnnotatedField(column = NAME_MIN_SNAP)
        private long minSnap;
        @DBAnnotatedField(column = NAME_MAX_SNAP)
        private long maxSnap;
        @DBAnnotatedField(column = NAME_VALUE)
        private byte[] value;

        protected TestObject(DBCachedObjectStore<?> store, DBRecord record) {
            super(store, record);
        }

        public void set(long parent, String entryKey, long minSnap, long maxSnap, byte[] value) {
            this.parent = parent;
            this.entryKey = entryKey;
            this.minSnap = minSnap;
            this.maxSnap = maxSnap;
            this.value = value;

            update(COL_PARENT, COL_ENTRY_KEY, COL_MIN_SNAP, COL_MAX_SNAP, COL_VALUE);
        }
    }

    public static class TestDomainObject extends DBCachedDomainObjectAdapter {
        protected TestDomainObject(DBHandle dbh, DBOpenMode openMode, TaskMonitor monitor,
                String name, int timeInterval, int bufSize, Object consumer) {
            super(dbh, openMode, monitor, name, timeInterval, bufSize, consumer);
        }

        @Override
        public boolean isChangeable() {
            return true;
        }

        @Override
        public String getDescription() {
            return "Test Domain Object";
        }
    }
    @Test
    @Repeated(2)
    public void testMeasureObjectStore() throws Exception {
        DBHandle handle = new DBHandle();
        TestDomainObject domObj =
            new TestDomainObject(handle, DBOpenMode.CREATE, monitor, "Test", 500, 1000, this);
        DBCachedObjectStoreFactory factory = new DBCachedObjectStoreFactory(domObj);

        try (TimeMe tm = new TimeMe("Object Store")) {
            try (Transaction tx = domObj.openTransaction("Test")) {
                DBCachedObjectStore<TestObject> store = factory.getOrCreateCachedStore("Test",
                    TestObject.class, TestObject::new, false);
                for (int i = 0; i < RECORD_COUNT; i++) {
                    try (LockHold hold = LockHold.lock(domObj.getReadWriteLock().writeLock())) {
                        TestObject obj = store.create();

                        ByteBuffer rangeEnc = ByteBuffer.allocate(18);
                        rangeEnc.putShort((short) 0x204); // Made up "space id"
                        rangeEnc.putLong(1024 * i);
                        rangeEnc.putLong(1024 * i + 1023);
                        obj.set(0, "Whatever", 0, Long.MAX_VALUE, rangeEnc.array());
                    }
                }
            }
        }
    }

    @Test
    @Repeated(20)
    public void testMeasureRTree() throws Exception {
        MyDomainObject domObj = new MyDomainObject(this);

        try (TimeMe tm = new TimeMe("Object Store")) {
            try (Transaction tx = domObj.openTransaction("Test")) {
                for (int i = 0; i < RECORD_COUNT; i++) {
                    domObj.map.put(
                        IntRect.ALL.immutable(0, Integer.MAX_VALUE, i * 1024, i * 1024 + 1023),
                        "Whatever");
                }
            }
        }
    }
}