diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 334e4168..cc7d56b0 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -37,12 +37,11 @@
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.sampler.ThreadGrouper;
 import me.lucko.spark.common.sampler.async.AsyncSampler;
-import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.java.MergeStrategy;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.FormatUtil;
 import me.lucko.spark.common.util.MediaTypes;
-import me.lucko.spark.common.util.MethodDisambiguator;
 import me.lucko.spark.common.ws.ViewerSocket;
 import me.lucko.spark.proto.SparkSamplerProtos;
 import net.kyori.adventure.text.Component;
@@ -507,12 +506,7 @@ private Sampler.ExportProps getExportProps(SparkPlatform platform, CommandRespon
         return new Sampler.ExportProps()
                 .creator(resp.senderData())
                 .comment(Iterables.getFirst(arguments.stringFlag("comment"), null))
-                .mergeMode(() -> {
-                    MethodDisambiguator methodDisambiguator = new MethodDisambiguator(platform.createClassFinder());
-                    return arguments.boolFlag("separate-parent-calls")
-                            ? MergeMode.separateParentCalls(methodDisambiguator)
-                            : MergeMode.sameMethod(methodDisambiguator);
-                })
+                .mergeStrategy(arguments.boolFlag("separate-parent-calls") ? MergeStrategy.SEPARATE_PARENT_CALLS : MergeStrategy.SAME_METHOD)
                 .classSourceLookup(() -> ClassSourceLookup.create(platform));
     }

diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index 20e7973e..d76b1a16 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -25,8 +25,8 @@
 import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
 import me.lucko.spark.common.platform.SparkMetadata;
 import me.lucko.spark.common.sampler.aggregator.DataAggregator;
-import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.node.exporter.NodeExporter;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
 import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;
@@ -42,6 +42,7 @@
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.function.Function;
 import java.util.function.Supplier;

 /**
@@ -198,7 +199,7 @@ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform pla
         proto.setMetadata(metadata);
     }

-    protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup, Supplier<ClassFinder> classFinderSupplier) {
+    protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Function<ProtoTimeEncoder, NodeExporter> nodeExporterFunction, ClassSourceLookup classSourceLookup, Supplier<ClassFinder> classFinderSupplier) {
         List<ThreadNode> data = dataAggregator.exportData();
         data.sort(Comparator.comparing(ThreadNode::getThreadLabel));

@@ -213,8 +214,10 @@ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAg
         this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows);
         proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export());

+        NodeExporter exporter = nodeExporterFunction.apply(timeEncoder);
+
         for (ThreadNode entry : data) {
-            proto.addThreads(entry.toProto(mergeMode, timeEncoder));
+            proto.addThreads(exporter.export(entry));
             classSourceVisitor.visit(entry);
         }

diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index bb74cd25..5aca704b 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -22,7 +22,7 @@
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.java.MergeStrategy;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.ws.ViewerSocket;
 import me.lucko.spark.proto.SparkProtos;
@@ -111,7 +111,7 @@ public interface Sampler {
     final class ExportProps {
         private CommandSender.Data creator;
         private String comment;
-        private Supplier<MergeMode> mergeMode;
+        private MergeStrategy mergeStrategy;
         private Supplier<ClassSourceLookup> classSourceLookup;
         private SocketChannelInfo channelInfo;

@@ -126,8 +126,8 @@ public String comment() {
             return this.comment;
         }

-        public Supplier<MergeMode> mergeMode() {
-            return this.mergeMode;
+        public MergeStrategy mergeStrategy() {
+            return this.mergeStrategy;
         }

         public Supplier<ClassSourceLookup> classSourceLookup() {
@@ -148,8 +148,8 @@ public ExportProps comment(String comment) {
             return this;
         }

-        public ExportProps mergeMode(Supplier<MergeMode> mergeMode) {
-            this.mergeMode = mergeMode;
+        public ExportProps mergeStrategy(MergeStrategy mergeStrategy) {
+            this.mergeStrategy = mergeStrategy;
             return this;
         }

diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
index b9a80e04..484493a3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
@@ -33,7 +33,7 @@ public class AsyncDataAggregator extends AbstractDataAggregator {

     /** A describer for async-profiler stack trace elements. */
     private static final StackTraceNode.Describer<AsyncStackTraceElement> STACK_TRACE_DESCRIBER = (element, parent) ->
-            new StackTraceNode.Description(element.getClassName(), element.getMethodName(), element.getMethodDescription());
+            new StackTraceNode.AsyncDescription(element.getClassName(), element.getMethodName(), element.getMethodDescription());

     protected AsyncDataAggregator(ThreadGrouper threadGrouper) {
         super(threadGrouper);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java
new file mode 100644
index 00000000..ef68c460
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java
@@ -0,0 +1,63 @@
+/*
+ * This file is part of spark.
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.async; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.exporter.AbstractNodeExporter; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.Collection; + +/** + * Node exporter for the {@link AsyncSampler}. + */ +public class AsyncNodeExporter extends AbstractNodeExporter { + public AsyncNodeExporter(ProtoTimeEncoder timeEncoder) { + super(timeEncoder); + } + + @Override + protected SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable childrenRefs) { + SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() + .setClassName(stackTraceNode.getClassName()) + .setMethodName(stackTraceNode.getMethodName()); + + double[] times = stackTraceNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + String methodDescription = stackTraceNode.getMethodDescription(); + if (methodDescription != null) { + proto.setMethodDesc(methodDescription); + } + + proto.addAllChildrenRefs(childrenRefs); + + return proto.build(); + } + + @Override + protected Collection exportChildren(Collection children) { + return children; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 5350558e..62af021e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -221,7 +221,7 @@ public SamplerData toProto(SparkPlatform platform, ExportProps exportProps) { proto.setChannelInfo(exportProps.channelInfo()); } writeMetadataToProto(proto, platform, exportProps.creator(), exportProps.comment(), this.dataAggregator); - writeDataToProto(proto, this.dataAggregator, exportProps.mergeMode().get(), exportProps.classSourceLookup().get(), platform::createClassFinder); + writeDataToProto(proto, this.dataAggregator, AsyncNodeExporter::new, exportProps.classSourceLookup().get(), platform::createClassFinder); return proto.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java index b7b841ea..5b6a4705 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java @@ -39,7 +39,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator { /** A describer for java.lang.StackTraceElement */ private static final StackTraceNode.Describer 
STACK_TRACE_DESCRIBER = (element, parent) -> { int parentLineNumber = parent == null ? StackTraceNode.NULL_LINE_NUMBER : parent.getLineNumber(); - return new StackTraceNode.Description(element.getClassName(), element.getMethodName(), element.getLineNumber(), parentLineNumber); + return new StackTraceNode.JavaDescription(element.getClassName(), element.getMethodName(), element.getLineNumber(), parentLineNumber); }; /** The worker pool for inserting stack nodes */ diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java new file mode 100644 index 00000000..c1100860 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java @@ -0,0 +1,97 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.java; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.exporter.AbstractNodeExporter; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.util.MethodDisambiguator; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * Node exporter for the {@link JavaSampler}. 
+ */ +public class JavaNodeExporter extends AbstractNodeExporter { + private final MergeStrategy mergeStrategy; + private final MethodDisambiguator methodDisambiguator; + + public JavaNodeExporter(ProtoTimeEncoder timeEncoder, MergeStrategy mergeStrategy, MethodDisambiguator methodDisambiguator) { + super(timeEncoder); + this.mergeStrategy = mergeStrategy; + this.methodDisambiguator = methodDisambiguator; + } + + protected SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable childrenRefs) { + SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() + .setClassName(stackTraceNode.getClassName()) + .setMethodName(stackTraceNode.getMethodName()); + + double[] times = stackTraceNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + int lineNumber = stackTraceNode.getLineNumber(); + if (lineNumber >= 0) { + proto.setLineNumber(lineNumber); + } + + if (this.mergeStrategy.separateParentCalls()) { + int parentLineNumber = stackTraceNode.getParentLineNumber(); + if (parentLineNumber >= 0) { + proto.setParentLineNumber(parentLineNumber); + } + } + + this.methodDisambiguator.disambiguate(stackTraceNode) + .map(MethodDisambiguator.MethodDescription::getDescription) + .ifPresent(proto::setMethodDesc); + + proto.addAllChildrenRefs(childrenRefs); + + return proto.build(); + } + + @Override + protected Collection exportChildren(Collection children) { + if (children.isEmpty()) { + return children; + } + + List list = new ArrayList<>(children.size()); + + outer: + for (StackTraceNode child : children) { + for (StackTraceNode other : list) { + if (this.mergeStrategy.shouldMerge(this.methodDisambiguator, other, child)) { + other.merge(child); + continue outer; + } + } + list.add(child); + } + return list; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index e3ae73a0..20f93835 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -28,6 +28,7 @@ import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.common.ws.ViewerSocket; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; @@ -192,8 +193,12 @@ public SamplerData toProto(SparkPlatform platform, ExportProps exportProps) { if (exportProps.channelInfo() != null) { proto.setChannelInfo(exportProps.channelInfo()); } + writeMetadataToProto(proto, platform, exportProps.creator(), exportProps.comment(), this.dataAggregator); - writeDataToProto(proto, this.dataAggregator, exportProps.mergeMode().get(), exportProps.classSourceLookup().get(), platform::createClassFinder); + + MethodDisambiguator methodDisambiguator = new MethodDisambiguator(platform.createClassFinder()); + writeDataToProto(proto, this.dataAggregator, timeEncoder -> new JavaNodeExporter(timeEncoder, exportProps.mergeStrategy(), methodDisambiguator), exportProps.classSourceLookup().get(), platform::createClassFinder); + return proto.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java
similarity index 64%
rename from spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java
rename to spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java
index 18a0ed31..eac1c6fc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java
@@ -18,37 +18,27 @@
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */

-package me.lucko.spark.common.sampler.node;
+package me.lucko.spark.common.sampler.java;

+import me.lucko.spark.common.sampler.node.StackTraceNode;
 import me.lucko.spark.common.util.MethodDisambiguator;

 import java.util.Objects;

 /**
- * Function to determine if {@link StackTraceNode}s should be merged.
+ * Strategy used to determine if {@link StackTraceNode}s should be merged.
  */
-public final class MergeMode {
+public enum MergeStrategy {

-    public static MergeMode sameMethod(MethodDisambiguator methodDisambiguator) {
-        return new MergeMode(methodDisambiguator, false);
-    }
-
-    public static MergeMode separateParentCalls(MethodDisambiguator methodDisambiguator) {
-        return new MergeMode(methodDisambiguator, true);
-    }
+    SAME_METHOD(false),
+    SEPARATE_PARENT_CALLS(true);

-    private final MethodDisambiguator methodDisambiguator;
     private final boolean separateParentCalls;

-    MergeMode(MethodDisambiguator methodDisambiguator, boolean separateParentCalls) {
-        this.methodDisambiguator = methodDisambiguator;
+    MergeStrategy(boolean separateParentCalls) {
         this.separateParentCalls = separateParentCalls;
     }

-    public MethodDisambiguator getMethodDisambiguator() {
-        return this.methodDisambiguator;
-    }
-
     public boolean separateParentCalls() {
         return this.separateParentCalls;
     }
@@ -56,11 +46,12 @@ public boolean separateParentCalls() {
     /**
      * Test if two stack trace nodes should be considered the same and merged.
      *
+     * @param disambiguator the method disambiguator
      * @param n1 the first node
      * @param n2 the second node
      * @return if the nodes should be merged
      */
-    public boolean shouldMerge(StackTraceNode n1, StackTraceNode n2) {
+    public boolean shouldMerge(MethodDisambiguator disambiguator, StackTraceNode n1, StackTraceNode n2) {
         // are the class names the same?
         if (!n1.getClassName().equals(n2.getClassName())) {
             return false;
@@ -77,8 +68,8 @@ public boolean shouldMerge(StackTraceNode n1, StackTraceNode n2) {
         }

         // are the method descriptions the same? (is it the same method?)
- String desc1 = this.methodDisambiguator.disambiguate(n1).map(MethodDisambiguator.MethodDescription::getDesc).orElse(null); - String desc2 = this.methodDisambiguator.disambiguate(n2).map(MethodDisambiguator.MethodDescription::getDesc).orElse(null); + String desc1 = disambiguator.disambiguate(n1).map(MethodDisambiguator.MethodDescription::getDescription).orElse(null); + String desc2 = disambiguator.disambiguate(n2).map(MethodDisambiguator.MethodDescription::getDescription).orElse(null); if (desc1 == null && desc2 == null) { return true; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index 163365cb..d3b77b41 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -22,10 +22,7 @@ import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -86,7 +83,7 @@ public boolean removeTimeWindows(IntPredicate predicate) { * * @return the total times */ - protected double[] encodeTimesForProto(ProtoTimeEncoder encoder) { + public double[] encodeTimesForProto(ProtoTimeEncoder encoder) { return encoder.encode(this.times); } @@ -107,35 +104,11 @@ protected StackTraceNode resolveChild(StackTraceNode.Description description) { * * @param other the other node */ - protected void merge(AbstractNode other) { + public void merge(AbstractNode other) { other.times.forEach((key, value) -> getTimeAccumulator(key).add(value.longValue())); for (Map.Entry child : other.children.entrySet()) { resolveChild(child.getKey()).merge(child.getValue()); } } - protected List exportChildren(MergeMode mergeMode) { - if (this.children.isEmpty()) { - return Collections.emptyList(); - } - - List list = new ArrayList<>(this.children.size()); - - outer: - for (StackTraceNode child : this.children.values()) { - // attempt to find an existing node we can merge into - for (StackTraceNode other : list) { - if (mergeMode.shouldMerge(other, child)) { - other.merge(child); - continue outer; - } - } - - // just add - list.add(child); - } - - return list; - } - } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java index fec71237..27cfa540 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java @@ -20,9 +20,6 @@ package me.lucko.spark.common.sampler.node; -import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import me.lucko.spark.common.util.MethodDisambiguator; -import me.lucko.spark.proto.SparkSamplerProtos; import org.checkerframework.checker.nullness.qual.Nullable; import java.util.Objects; @@ -45,58 +42,33 @@ public StackTraceNode(Description description) { } public String getClassName() { - return this.description.className; + return this.description.className(); } public String getMethodName() { - return this.description.methodName; + return this.description.methodName(); } public String getMethodDescription() { - return this.description.methodDescription; + return this.description instanceof AsyncDescription + ? 
((AsyncDescription) this.description).methodDescription() + : null; } public int getLineNumber() { - return this.description.lineNumber; + return this.description instanceof JavaDescription + ? ((JavaDescription) this.description).lineNumber() + : NULL_LINE_NUMBER; } public int getParentLineNumber() { - return this.description.parentLineNumber; - } - - public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder, Iterable childrenRefs) { - SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() - .setClassName(this.description.className) - .setMethodName(this.description.methodName); - - double[] times = encodeTimesForProto(timeEncoder); - for (double time : times) { - proto.addTimes(time); - } - - if (this.description.lineNumber >= 0) { - proto.setLineNumber(this.description.lineNumber); - } - - if (mergeMode.separateParentCalls() && this.description.parentLineNumber >= 0) { - proto.setParentLineNumber(this.description.parentLineNumber); - } - - if (this.description.methodDescription != null) { - proto.setMethodDesc(this.description.methodDescription); - } else { - mergeMode.getMethodDisambiguator().disambiguate(this) - .map(MethodDisambiguator.MethodDescription::getDesc) - .ifPresent(proto::setMethodDesc); - } - - proto.addAllChildrenRefs(childrenRefs); - - return proto.build(); + return this.description instanceof JavaDescription + ? ((JavaDescription) this.description).parentLineNumber() + : NULL_LINE_NUMBER; } /** - * Function to construct a {@link StackTraceNode.Description} from a stack trace element + * Function to construct a {@link Description} from a stack trace element * of type {@code T}. * * @param the stack trace element type, e.g. {@link java.lang.StackTraceElement} @@ -114,53 +86,101 @@ public interface Describer { Description describe(T element, @Nullable T parent); } - /** - * Encapsulates the attributes of a {@link StackTraceNode}. 
- */ - public static final class Description { + public interface Description { + String className(); + + String methodName(); + } + + public static final class AsyncDescription implements Description { private final String className; private final String methodName; - - // async-profiler private final String methodDescription; - // Java + private final int hash; + + public AsyncDescription(String className, String methodName, String methodDescription) { + this.className = className; + this.methodName = methodName; + this.methodDescription = methodDescription; + this.hash = Objects.hash(this.className, this.methodName, this.methodDescription); + } + + @Override + public String className() { + return this.className; + } + + @Override + public String methodName() { + return this.methodName; + } + + public String methodDescription() { + return this.methodDescription; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AsyncDescription description = (AsyncDescription) o; + return this.hash == description.hash && + this.className.equals(description.className) && + this.methodName.equals(description.methodName) && + Objects.equals(this.methodDescription, description.methodDescription); + } + + @Override + public int hashCode() { + return this.hash; + } + } + + public static final class JavaDescription implements Description { + private final String className; + private final String methodName; private final int lineNumber; private final int parentLineNumber; private final int hash; - // Constructor used by the Java sampler - public Description(String className, String methodName, int lineNumber, int parentLineNumber) { + public JavaDescription(String className, String methodName, int lineNumber, int parentLineNumber) { this.className = className; this.methodName = methodName; - this.methodDescription = null; this.lineNumber = lineNumber; this.parentLineNumber = parentLineNumber; this.hash = Objects.hash(this.className, this.methodName, this.lineNumber, this.parentLineNumber); } - // Constructor used by the async-profiler sampler - public Description(String className, String methodName, String methodDescription) { - this.className = className; - this.methodName = methodName; - this.methodDescription = methodDescription; - this.lineNumber = StackTraceNode.NULL_LINE_NUMBER; - this.parentLineNumber = StackTraceNode.NULL_LINE_NUMBER; - this.hash = Objects.hash(this.className, this.methodName, this.methodDescription); + @Override + public String className() { + return this.className; + } + + @Override + public String methodName() { + return this.methodName; + } + + public int lineNumber() { + return this.lineNumber; + } + + public int parentLineNumber() { + return this.parentLineNumber; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - Description description = (Description) o; + JavaDescription description = (JavaDescription) o; return this.hash == description.hash && this.lineNumber == description.lineNumber && this.parentLineNumber == description.parentLineNumber && this.className.equals(description.className) && - this.methodName.equals(description.methodName) && - Objects.equals(this.methodDescription, description.methodDescription); + this.methodName.equals(description.methodName); } @Override diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index 37ff359d..f934e535 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -20,16 +20,9 @@ package me.lucko.spark.common.sampler.node; -import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import me.lucko.spark.common.util.IndexedListBuilder; -import me.lucko.spark.proto.SparkSamplerProtos; - import java.util.ArrayDeque; import java.util.Collection; -import java.util.Deque; import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; import java.util.Queue; import java.util.function.IntPredicate; @@ -134,92 +127,4 @@ public boolean removeTimeWindowsRecursively(IntPredicate predicate) { return getTimeWindows().isEmpty(); } - public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) { - SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() - .setName(getThreadLabel()); - - double[] times = encodeTimesForProto(timeEncoder); - for (double time : times) { - proto.addTimes(time); - } - - // When converting to a proto, we change the data structure from a recursive tree to an array. - // Effectively, instead of: - // - // { - // data: 'one', - // children: [ - // { - // data: 'two', - // children: [{ data: 'four' }] - // }, - // { data: 'three' } - // ] - // } - // - // we transmit: - // - // [ - // { data: 'one', children: [1, 2] }, - // { data: 'two', children: [3] } - // { data: 'three', children: [] } - // { data: 'four', children: [] } - // ] - // - - // the flattened array of nodes - IndexedListBuilder nodesArray = new IndexedListBuilder<>(); - - // Perform a depth-first post order traversal of the tree - Deque stack = new ArrayDeque<>(); - - // push the thread node's children to the stack - List childrenRefs = new LinkedList<>(); - for (StackTraceNode child : exportChildren(mergeMode)) { - stack.push(new Node(child, childrenRefs)); - } - - Node node; - while (!stack.isEmpty()) { - node = stack.peek(); - - // on the first visit, just push this node's children and leave it on the stack - if (node.firstVisit) { - for (StackTraceNode child : node.stackTraceNode.exportChildren(mergeMode)) { - stack.push(new Node(child, node.childrenRefs)); - } - node.firstVisit = false; - continue; - } - - // convert StackTraceNode to a proto - // - at this stage, we have already visited this node's children - // - the refs for each child are stored in node.childrenRefs - SparkSamplerProtos.StackTraceNode childProto = node.stackTraceNode.toProto(mergeMode, timeEncoder, node.childrenRefs); - - // add the child proto to the nodes array, and record the ref in the parent - int childIndex = nodesArray.add(childProto); - node.parentChildrenRefs.add(childIndex); - - // pop from the stack - stack.pop(); - } - - proto.addAllChildrenRefs(childrenRefs); - proto.addAllChildren(nodesArray.build()); - - return proto.build(); - } - - private static final class Node { - private final StackTraceNode stackTraceNode; - private boolean firstVisit = true; - private final List childrenRefs = new LinkedList<>(); - private final List parentChildrenRefs; - - private Node(StackTraceNode node, List parentChildrenRefs) { - this.stackTraceNode = node; - this.parentChildrenRefs = parentChildrenRefs; - } - } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java new file mode 100644 index 00000000..bc548572 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java @@ -0,0 +1,136 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.node.exporter; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.util.IndexedListBuilder; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.ArrayDeque; +import java.util.Collection; +import java.util.Deque; +import java.util.LinkedList; +import java.util.List; + +public abstract class AbstractNodeExporter implements NodeExporter { + protected final ProtoTimeEncoder timeEncoder; + + protected AbstractNodeExporter(ProtoTimeEncoder timeEncoder) { + this.timeEncoder = timeEncoder; + } + + @Override + public SparkSamplerProtos.ThreadNode export(ThreadNode threadNode) { + SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() + .setName(threadNode.getThreadLabel()); + + double[] times = threadNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + // When converting to a proto, we change the data structure from a recursive tree to an array. 
+ // Effectively, instead of: + // + // { + // data: 'one', + // children: [ + // { + // data: 'two', + // children: [{ data: 'four' }] + // }, + // { data: 'three' } + // ] + // } + // + // we transmit: + // + // [ + // { data: 'one', children: [1, 2] }, + // { data: 'two', children: [3] } + // { data: 'three', children: [] } + // { data: 'four', children: [] } + // ] + // + + // the flattened array of nodes + IndexedListBuilder nodesArray = new IndexedListBuilder<>(); + + // Perform a depth-first post order traversal of the tree + Deque stack = new ArrayDeque<>(); + + // push the thread node's children to the stack + List childrenRefs = new LinkedList<>(); + for (StackTraceNode child : exportChildren(threadNode.getChildren())) { + stack.push(new Node(child, childrenRefs)); + } + + Node node; + while (!stack.isEmpty()) { + node = stack.peek(); + + // on the first visit, just push this node's children and leave it on the stack + if (node.firstVisit) { + for (StackTraceNode child : exportChildren(node.stackTraceNode.getChildren())) { + stack.push(new Node(child, node.childrenRefs)); + } + node.firstVisit = false; + continue; + } + + // convert StackTraceNode to a proto + // - at this stage, we have already visited this node's children + // - the refs for each child are stored in node.childrenRefs + SparkSamplerProtos.StackTraceNode childProto = this.export(node.stackTraceNode, node.childrenRefs); + + // add the child proto to the nodes array, and record the ref in the parent + int childIndex = nodesArray.add(childProto); + node.parentChildrenRefs.add(childIndex); + + // pop from the stack + stack.pop(); + } + + proto.addAllChildrenRefs(childrenRefs); + proto.addAllChildren(nodesArray.build()); + + return proto.build(); + } + + protected abstract SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable childrenRefs); + + protected abstract Collection exportChildren(Collection children); + + private static final class Node { + private final StackTraceNode stackTraceNode; + private boolean firstVisit = true; + private final List childrenRefs = new LinkedList<>(); + private final List parentChildrenRefs; + + private Node(StackTraceNode node, List parentChildrenRefs) { + this.stackTraceNode = node; + this.parentChildrenRefs = parentChildrenRefs; + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java new file mode 100644 index 00000000..b599fc01 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java @@ -0,0 +1,39 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler.node.exporter; + +import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.proto.SparkSamplerProtos; + +/** + * Exports a {@link ThreadNode} to a protobuf message. + */ +public interface NodeExporter { + + /** + * Exports a {@link ThreadNode} to a protobuf message. + * + * @param threadNode the thread node + * @return the exported protobuf message + */ + SparkSamplerProtos.ThreadNode export(ThreadNode threadNode); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java index c856123a..a3b4f024 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java @@ -289,7 +289,7 @@ private void visitStackNode(StackTraceNode node) { if (node.getMethodDescription() != null) { MethodCall methodCall = new MethodCall(node.getClassName(), node.getMethodName(), node.getMethodDescription()); this.methodSources.computeIfAbsent(methodCall, this.lookup::identify); - } else { + } else if (node.getLineNumber() != StackTraceNode.NULL_LINE_NUMBER) { MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber()); this.lineSources.computeIfAbsent(methodCall, this.lookup::identify); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java b/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java index 3b0ddc3a..8553abbf 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java @@ -85,6 +85,29 @@ public Optional disambiguate(String className, String methodN } } + private ComputedClass compute(String className) throws IOException { + final ImmutableListMultimap.Builder descriptionsByName = ImmutableListMultimap.builder(); + final Map descriptionsByLine = new HashMap<>(); + + ClassReader classReader = getClassReader(className); + classReader.accept(new ClassVisitor(Opcodes.ASM7) { + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + MethodDescription description = new MethodDescription(name, descriptor); + descriptionsByName.put(name, description); + + return new MethodVisitor(Opcodes.ASM7) { + @Override + public void visitLineNumber(int line, Label start) { + descriptionsByLine.put(line, description); + } + }; + } + }, Opcodes.ASM7); + + return new ComputedClass(descriptionsByName.build(), ImmutableMap.copyOf(descriptionsByLine)); + } + private ClassReader getClassReader(String className) throws IOException { String resource = className.replace('.', '/') + ".class"; @@ -106,28 +129,6 @@ private ClassReader getClassReader(String className) throws IOException { throw new IOException("Unable to get resource: " + className); } - private ComputedClass compute(String className) throws IOException { - ImmutableListMultimap.Builder descriptionsByName = ImmutableListMultimap.builder(); - Map descriptionsByLine = new HashMap<>(); - - getClassReader(className).accept(new ClassVisitor(Opcodes.ASM7) { - @Override - public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { - MethodDescription description = new MethodDescription(name, 
descriptor); - descriptionsByName.put(name, description); - - return new MethodVisitor(Opcodes.ASM7) { - @Override - public void visitLineNumber(int line, Label start) { - descriptionsByLine.put(line, description); - } - }; - } - }, Opcodes.ASM7); - - return new ComputedClass(descriptionsByName.build(), ImmutableMap.copyOf(descriptionsByLine)); - } - private static final class ComputedClass { private static final ComputedClass EMPTY = new ComputedClass(ImmutableListMultimap.of(), ImmutableMap.of()); @@ -142,24 +143,24 @@ private ComputedClass(ListMultimap descriptionsByName public static final class MethodDescription { private final String name; - private final String desc; + private final String description; - private MethodDescription(String name, String desc) { + private MethodDescription(String name, String description) { this.name = name; - this.desc = desc; + this.description = description; } public String getName() { return this.name; } - public String getDesc() { - return this.desc; + public String getDescription() { + return this.description; } @Override public String toString() { - return this.name + this.desc; + return this.name + this.description; } } diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java index 7db14a38..f6d9f875 100644 --- a/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java @@ -22,9 +22,8 @@ import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.java.JavaSampler; -import me.lucko.spark.common.sampler.node.MergeMode; +import me.lucko.spark.common.sampler.java.MergeStrategy; import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; import me.lucko.spark.test.TestClass2; import me.lucko.spark.test.plugin.TestCommandSender; @@ -74,9 +73,12 @@ public void testSampler(ProfilerType profilerType, @TempDir Path directory) { Sampler.ExportProps exportProps = new Sampler.ExportProps() .creator(TestCommandSender.INSTANCE.toData()) - .mergeMode(() -> MergeMode.sameMethod(new MethodDisambiguator(plugin.platform().createClassFinder()))) .classSourceLookup(() -> ClassSourceLookup.create(plugin.platform())); + if (profilerType == ProfilerType.JAVA) { + exportProps.mergeStrategy(MergeStrategy.SAME_METHOD); + } + SparkSamplerProtos.SamplerData proto = sampler.toProto(plugin.platform(), exportProps); assertNotNull(proto); diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java index 7df9e59d..f1534a89 100644 --- a/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java @@ -23,9 +23,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import me.lucko.spark.common.sampler.SamplerMode; +import me.lucko.spark.common.sampler.async.AsyncNodeExporter; import me.lucko.spark.common.sampler.async.AsyncStackTraceElement; import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; import org.junit.jupiter.api.Test; @@ -39,7 +39,7 @@ public class NodeTest { - private static final 
StackTraceNode.Describer<AsyncStackTraceElement> STACK_TRACE_DESCRIBER = (element, parent) -> new StackTraceNode.Description(element.getClassName(), element.getMethodName(), element.getMethodDescription());
+    private static final StackTraceNode.Describer<AsyncStackTraceElement> STACK_TRACE_DESCRIBER = (element, parent) -> new StackTraceNode.AsyncDescription(element.getClassName(), element.getMethodName(), element.getMethodDescription());

     private static final int WINDOW = 10;
     private static final AsyncStackTraceElement NODE_0 = new AsyncStackTraceElement("java.lang.Thread", "run", "()V");
@@ -104,10 +104,7 @@ public void testToProto() {
         int[] keys = timeEncoder.getKeys();
         assertArrayEquals(new int[]{WINDOW, WINDOW + 1}, keys);

-        SparkSamplerProtos.ThreadNode proto = threadNode.toProto(
-                MergeMode.sameMethod(new MethodDisambiguator(null)),
-                timeEncoder
-        );
+        SparkSamplerProtos.ThreadNode proto = new AsyncNodeExporter(timeEncoder).export(threadNode);

         SparkSamplerProtos.ThreadNode expected = SparkSamplerProtos.ThreadNode.newBuilder()
                 .setName("Test Thread")
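
The sketch below is illustrative only and is not part of the patch. It shows how the pieces introduced above compose, using only constructors and methods that appear in this diff: ExportProps now carries a plain MergeStrategy, each sampler builds a NodeExporter from the ProtoTimeEncoder, and the exporter turns a ThreadNode tree into its protobuf form. The ExporterUsageSketch class and its package are hypothetical.

package me.lucko.spark.example; // hypothetical package, not part of spark

import me.lucko.spark.common.sampler.async.AsyncNodeExporter;
import me.lucko.spark.common.sampler.java.JavaNodeExporter;
import me.lucko.spark.common.sampler.java.MergeStrategy;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.sampler.node.exporter.NodeExporter;
import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
import me.lucko.spark.common.util.MethodDisambiguator;
import me.lucko.spark.proto.SparkSamplerProtos;

/** Hypothetical helper showing how the refactored export types fit together. */
public final class ExporterUsageSketch {

    /** Builds the exporter the way JavaSampler#toProto now does. */
    public static NodeExporter javaExporter(ProtoTimeEncoder timeEncoder,
                                            MergeStrategy strategy, // e.g. MergeStrategy.SAME_METHOD
                                            MethodDisambiguator disambiguator) {
        return new JavaNodeExporter(timeEncoder, strategy, disambiguator);
    }

    /** Builds the exporter the way AsyncSampler#toProto now does. */
    public static NodeExporter asyncExporter(ProtoTimeEncoder timeEncoder) {
        return new AsyncNodeExporter(timeEncoder);
    }

    /** Converts one sampled thread tree into its protobuf form. */
    public static SparkSamplerProtos.ThreadNode export(NodeExporter exporter, ThreadNode threadNode) {
        return exporter.export(threadNode);
    }

    private ExporterUsageSketch() {
    }
}

This mirrors the new call sites: JavaSampler constructs one MethodDisambiguator per export and hands it to JavaNodeExporter together with the MergeStrategy from ExportProps, instead of threading it through the old MergeMode wrapper.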