diff --git a/README.md b/README.md
index 74db6fb5..0c2fcb69 100644
--- a/README.md
+++ b/README.md
@@ -37,8 +37,8 @@ It is:
 It works by sampling statistical data about the system's activity, and constructing a call graph based on this data. The call graph is then displayed in an online viewer for further analysis by the user.
 
 There are two different profiler engines:
-* Native `AsyncGetCallTrace` + `perf_events` - uses [async-profiler](https://github.com/jvm-profiling-tools/async-profiler) (*only available on Linux x86_64 systems*)
-* Built-in Java `ThreadMXBean` - an improved version of the popular [WarmRoast profiler](https://github.com/sk89q/WarmRoast) by sk89q.
+* Native/Async - uses the [async-profiler](https://github.com/async-profiler/async-profiler) library (*only available on Linux & macOS systems*)
+* Java - uses `ThreadMXBean`, an improved version of the popular [WarmRoast profiler](https://github.com/sk89q/WarmRoast) by sk89q.
 
 ### :zap: Memory Inspection
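The `ThreadMXBean` engine mentioned in the README hunk above boils down to periodically dumping every thread's stack via `java.lang.management` and folding the traces into a call graph. A minimal sketch of that sampling loop (illustrative only, not spark's actual implementation; the interval and iteration count are arbitrary):

```java
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;

public class ThreadDumpSampler {
    public static void main(String[] args) throws InterruptedException {
        ThreadMXBean threads = ManagementFactory.getThreadMXBean();
        for (int i = 0; i < 100; i++) {          // roughly one second of samples
            // snapshot every live thread, skipping expensive lock info
            for (ThreadInfo info : threads.dumpAllThreads(false, false)) {
                StackTraceElement[] frames = info.getStackTrace();
                // a real profiler would merge `frames` into an aggregated call tree here
            }
            Thread.sleep(10);                    // ~100 Hz sampling interval
        }
    }
}
```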
diff --git a/build.gradle b/build.gradle
index 5d15ff22..cc6d1d2a 100644
--- a/build.gradle
+++ b/build.gradle
@@ -21,6 +21,21 @@ subprojects {
     patchVersion = determinePatchVersion()
     pluginVersion = baseVersion + '.' + patchVersion
     pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
+
+    applyExcludes = { Jar jarTask ->
+        jarTask.exclude 'module-info.class'
+        jarTask.exclude 'META-INF/maven/**'
+        jarTask.exclude 'META-INF/proguard/**'
+        jarTask.exclude 'META-INF/LICENSE'
+        jarTask.exclude 'META-INF/NOTICE'
+        // protobuf
+        jarTask.exclude '**/*.proto'
+        jarTask.exclude '**/*.proto.bin'
+        // async-profiler
+        jarTask.exclude 'linux-arm64/**'
+        jarTask.exclude 'linux-x64/**'
+        jarTask.exclude 'macos/**'
+    }
 }
 
 tasks.withType(JavaCompile).configureEach {
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index da46dc30..aace9b06 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -45,11 +45,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 }
 
 artifacts {
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index 8438d5e6..95da3ae0 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -31,11 +31,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 }
 
 artifacts {
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index ba3d2b48..a9626f2d 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -12,7 +12,7 @@ license {
 
 dependencies {
     api project(':spark-api')
-    implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.3'
+    implementation 'tools.profiler:async-profiler:3.0'
     implementation 'org.ow2.asm:asm:9.7'
     implementation 'net.bytebuddy:byte-buddy-agent:1.14.17'
     implementation 'com.google.protobuf:protobuf-javalite:4.28.2'
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index 84aaa954..02645ff5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -144,18 +144,8 @@ private static AsyncProfiler load(SparkPlatform platform) throws Exception {
         String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
         String jvm = System.getProperty("java.vm.name");
 
-        // openj9 not supported by async-profiler at the moment
-        if (jvm.contains("OpenJ9")) {
-            throw new UnsupportedJvmException(jvm);
-        }
-
-        if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
-            arch = "amd64-musl";
-        }
-
         Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
                 .put("linux", "amd64", "linux/amd64")
-                .put("linux", "amd64-musl", "linux/amd64-musl")
                 .put("linux", "aarch64", "linux/aarch64")
                 .put("macosx", "amd64", "macos")
                 .put("macosx", "aarch64", "macos")
@@ -242,20 +232,4 @@ public NativeLoadingException(Throwable cause) {
             super("A runtime error occurred whilst loading the native library", cause);
         }
     }
-
-    // Checks if the system is using musl instead of glibc
-    private static boolean isLinuxMusl() {
-        try {
-            InputStream stream = new ProcessBuilder("sh", "-c", "ldd `which ls`")
-                    .start()
-                    .getInputStream();
-
-            BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
-            String output = reader.lines().collect(Collectors.joining());
-            return output.contains("musl"); // shrug
-        } catch (Throwable e) {
-            // ignore
-            return false;
-        }
-    }
 }
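With the OpenJ9 and musl special cases gone, platform support in `AsyncProfilerAccess` reduces to a single (os, arch) lookup in the Guava table shown above. A standalone sketch of how such a lookup behaves (the values mirror the patch; the `resolve` helper is hypothetical):

```java
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Table;

public class PlatformLookup {
    // row = normalized os.name, column = os.arch, value = bundled library directory
    private static final Table<String, String, String> SUPPORTED =
            ImmutableTable.<String, String, String>builder()
                    .put("linux", "amd64", "linux/amd64")
                    .put("linux", "aarch64", "linux/aarch64")
                    .put("macosx", "amd64", "macos")
                    .put("macosx", "aarch64", "macos")
                    .build();

    static String resolve(String os, String arch) {
        return SUPPORTED.get(os, arch); // null means the platform is unsupported
    }

    public static void main(String[] args) {
        System.out.println(resolve("linux", "aarch64")); // linux/aarch64
        System.out.println(resolve("windows", "amd64")); // null
    }
}
```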
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 994c03bc..966a7d35 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -168,8 +168,12 @@ private void scheduleTimeout() {
         }
 
         this.scheduler.schedule(() -> {
-            stop(false);
-            this.future.complete(this);
+            try {
+                stop(false);
+                this.future.complete(this);
+            } catch (Exception e) {
+                this.future.completeExceptionally(e);
+            }
         }, delay, TimeUnit.MILLISECONDS);
     }
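The `AsyncSampler` change above is the standard guard when completing a `CompletableFuture` from a scheduled task: without the try/catch, anything thrown by `stop(false)` is swallowed by the executor and the future never completes. A self-contained sketch of the pattern (hypothetical names, not spark code):

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class TimeoutCompletion {
    public static void main(String[] args) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        CompletableFuture<String> future = new CompletableFuture<>();

        scheduler.schedule(() -> {
            try {
                future.complete(riskyStop()); // may throw
            } catch (Exception e) {
                future.completeExceptionally(e); // propagate instead of hanging the caller
            }
        }, 100, TimeUnit.MILLISECONDS);

        future.whenComplete((result, err) -> {
            System.out.println(err != null ? "failed: " + err : "done: " + result);
            scheduler.shutdown();
        });
    }

    static String riskyStop() {
        throw new IllegalStateException("profiler already stopped");
    }
}
```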
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
index 60f65437..4a327f20 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
@@ -1,17 +1,6 @@
 /*
- * Copyright 2020 Andrei Pangin
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Copyright The async-profiler authors
+ * SPDX-License-Identifier: Apache-2.0
  */
 
 package me.lucko.spark.common.sampler.async.jfr;
@@ -37,9 +26,11 @@ public void clear() {
         size = 0;
     }
 
+    // spark start
     public int size() {
         return this.size;
     }
+    // spark end
 
     public void put(long key, T value) {
         if (key == 0) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
index ea4985ee..5bb8a301 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
@@ -1,17 +1,6 @@
 /*
- * Copyright 2020 Andrei Pangin
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Copyright The async-profiler authors
+ * SPDX-License-Identifier: Apache-2.0
  */
 
 package me.lucko.spark.common.sampler.async.jfr;
@@ -20,10 +9,14 @@
 import java.io.Closeable;
 import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
 import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
 import java.nio.channels.FileChannel;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -39,41 +32,55 @@ public class JfrReader implements Closeable {
     private static final int CHUNK_HEADER_SIZE = 68;
     private static final int CHUNK_SIGNATURE = 0x464c5200;
 
+    private static final byte STATE_NEW_CHUNK = 0;
+    private static final byte STATE_READING = 1;
+    private static final byte STATE_EOF = 2;
+    private static final byte STATE_INCOMPLETE = 3;
+
     private final FileChannel ch;
     private ByteBuffer buf;
+    private final long fileSize;
     private long filePosition;
+    private byte state;
 
-    public boolean incomplete;
     public long startNanos = Long.MAX_VALUE;
     public long endNanos = Long.MIN_VALUE;
    public long startTicks = Long.MAX_VALUE;
+    public long chunkStartNanos;
+    public long chunkEndNanos;
+    public long chunkStartTicks;
     public long ticksPerSec;
+    public boolean stopAtNewChunk;
 
     public final Dictionary<JfrClass> types = new Dictionary<>();
     public final Map<String, JfrClass> typesByName = new HashMap<>();
-    public final Map<Long, String> threads = new HashMap<>(); // spark
+    public final Map<Long, String> threads = new HashMap<>(); // spark - convert to map
     public final Dictionary<ClassRef> classes = new Dictionary<>();
+    public final Dictionary<String> strings = new Dictionary<>();
     public final Dictionary<byte[]> symbols = new Dictionary<>();
     public final Dictionary<MethodRef> methods = new Dictionary<>();
     public final Dictionary<StackTrace> stackTraces = new Dictionary<>();
-    public final Dictionary<AsyncStackTraceElement> stackFrames = new Dictionary<>(); // spark
-    public final Map<Integer, String> frameTypes = new HashMap<>();
-    public final Map<Integer, String> threadStates = new HashMap<>();
+    public final Dictionary<AsyncStackTraceElement> stackFrames = new Dictionary<>(); // spark - add field
     public final Map<String, String> settings = new HashMap<>();
+    public final Map<String, Map<Integer, String>> enums = new HashMap<>();
+
+    private final Dictionary<Constructor<? extends Event>> customEvents = new Dictionary<>();
 
     private int executionSample;
     private int nativeMethodSample;
+    private int wallClockSample;
     private int allocationInNewTLAB;
     private int allocationOutsideTLAB;
     private int allocationSample;
+    private int liveObject;
     private int monitorEnter;
     private int threadPark;
     private int activeSetting;
-    private boolean activeSettingHasStack;
 
     public JfrReader(Path path) throws IOException { // spark - Path instead of String
         this.ch = FileChannel.open(path, StandardOpenOption.READ); // spark - Path instead of String
         this.buf = ByteBuffer.allocateDirect(BUFFER_SIZE);
+        this.fileSize = ch.size();
 
         buf.flip();
         ensureBytes(CHUNK_HEADER_SIZE);
@@ -82,15 +89,52 @@ public JfrReader(Path path) throws IOException { // spark - Path instead of Stri
         }
     }
 
+    public JfrReader(ByteBuffer buf) throws IOException {
+        this.ch = null;
+        this.buf = buf;
+        this.fileSize = buf.limit();
+
+        buf.order(ByteOrder.BIG_ENDIAN);
+        if (!readChunk(0)) {
+            throw new IOException("Incomplete JFR file");
+        }
+    }
+
     @Override
     public void close() throws IOException {
-        ch.close();
+        if (ch != null) {
+            ch.close();
+        }
+    }
+
+    public boolean eof() {
+        return state >= STATE_EOF;
+    }
+
+    public boolean incomplete() {
+        return state == STATE_INCOMPLETE;
     }
 
     public long durationNanos() {
         return endNanos - startNanos;
     }
 
+    public void registerEvent(String name, Class<? extends Event> eventClass) {
+        JfrClass type = typesByName.get(name);
+        if (type != null) {
+            try {
+                customEvents.put(type.id, eventClass.getConstructor(JfrReader.class));
+            } catch (NoSuchMethodException e) {
+                throw new IllegalArgumentException("No suitable constructor found");
+            }
+        }
+    }
+
+    // Similar to eof(), but parses the next chunk header
+    public boolean hasMoreChunks() throws IOException {
+        return state == STATE_NEW_CHUNK ? readChunk(buf.position()) : state == STATE_READING;
+    }
+
     public List<Event> readAllEvents() throws IOException {
         return readAllEvents(null);
     }
@@ -116,41 +160,58 @@ public <E extends Event> E readEvent(Class<E> cls) throws IOException {
             int type = getVarint();
 
             if (type == 'L' && buf.getInt(pos) == CHUNK_SIGNATURE) {
-                if (readChunk(pos)) {
+                if (state != STATE_NEW_CHUNK && stopAtNewChunk) {
+                    buf.position(pos);
+                    state = STATE_NEW_CHUNK;
+                } else if (readChunk(pos)) {
                     continue;
                 }
-                break;
+                return null;
             }
 
             if (type == executionSample || type == nativeMethodSample) {
-                if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample();
+                if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample(false);
+            } else if (type == wallClockSample) {
+                if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample(true);
             } else if (type == allocationInNewTLAB) {
                 if (cls == null || cls == AllocationSample.class) return (E) readAllocationSample(true);
             } else if (type == allocationOutsideTLAB || type == allocationSample) {
                 if (cls == null || cls == AllocationSample.class) return (E) readAllocationSample(false);
+            } else if (type == liveObject) {
+                if (cls == null || cls == LiveObject.class) return (E) readLiveObject();
             } else if (type == monitorEnter) {
                 if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(false);
            } else if (type == threadPark) {
                 if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(true);
             } else if (type == activeSetting) {
                 readActiveSetting();
-            }
-
-            if ((pos += size) <= buf.limit()) {
-                buf.position(pos);
             } else {
-                seek(filePosition + pos);
+                Constructor<? extends Event> customEvent = customEvents.get(type);
+                if (customEvent != null && (cls == null || cls == customEvent.getDeclaringClass())) {
+                    try {
+                        return (E) customEvent.newInstance(this);
+                    } catch (ReflectiveOperationException e) {
+                        throw new IllegalStateException(e);
+                    } finally {
+                        seek(filePosition + pos + size);
+                    }
+                }
             }
+
+            seek(filePosition + pos + size);
         }
+
+        state = STATE_EOF;
         return null;
     }
 
-    private ExecutionSample readExecutionSample() {
+    private ExecutionSample readExecutionSample(boolean hasSamples) {
         long time = getVarlong();
         int tid = getVarint();
         int stackTraceId = getVarint();
         int threadState = getVarint();
-        return new ExecutionSample(time, tid, stackTraceId, threadState);
+        int samples = hasSamples ? getVarint() : 1;
+        return new ExecutionSample(time, tid, stackTraceId, threadState, samples);
     }
 
     private AllocationSample readAllocationSample(boolean tlab) {
@@ -163,6 +224,16 @@ private AllocationSample readAllocationSample(boolean tlab) {
         return new AllocationSample(time, tid, stackTraceId, classId, allocationSize, tlabSize);
     }
 
+    private LiveObject readLiveObject() {
+        long time = getVarlong();
+        int tid = getVarint();
+        int stackTraceId = getVarint();
+        int classId = getVarint();
+        long allocationSize = getVarlong();
+        long allocationTime = getVarlong();
+        return new LiveObject(time, tid, stackTraceId, classId, allocationSize, allocationTime);
+    }
+
     private ContendedLock readContendedLock(boolean hasTimeout) {
         long time = getVarlong();
         long duration = getVarlong();
@@ -176,11 +247,12 @@ private ContendedLock readContendedLock(boolean hasTimeout) {
     }
 
     private void readActiveSetting() {
-        long time = getVarlong();
-        long duration = getVarlong();
-        int tid = getVarint();
-        if (activeSettingHasStack) getVarint();
-        long id = getVarlong();
+        for (JfrField field : typesByName.get("jdk.ActiveSetting").fields) {
+            getVarlong();
+            if ("id".equals(field.name)) {
+                break;
+            }
+        }
         String name = getString();
         String value = getString();
         settings.put(name, value);
@@ -196,27 +268,38 @@ private boolean readChunk(int pos) throws IOException {
             throw new IOException("Unsupported JFR version: " + (version >>> 16) + "." + (version & 0xffff));
         }
 
+        long chunkStart = filePosition + pos;
+        long chunkSize = buf.getLong(pos + 8);
+        if (chunkStart + chunkSize > fileSize) {
+            state = STATE_INCOMPLETE;
+            return false;
+        }
+
         long cpOffset = buf.getLong(pos + 16);
         long metaOffset = buf.getLong(pos + 24);
         if (cpOffset == 0 || metaOffset == 0) {
-            incomplete = true;
+            state = STATE_INCOMPLETE;
             return false;
         }
 
-        startNanos = Math.min(startNanos, buf.getLong(pos + 32));
-        endNanos = Math.max(endNanos, buf.getLong(pos + 32) + buf.getLong(pos + 40));
-        startTicks = Math.min(startTicks, buf.getLong(pos + 48));
+        chunkStartNanos = buf.getLong(pos + 32);
+        chunkEndNanos = buf.getLong(pos + 32) + buf.getLong(pos + 40);
+        chunkStartTicks = buf.getLong(pos + 48);
         ticksPerSec = buf.getLong(pos + 56);
 
+        startNanos = Math.min(startNanos, chunkStartNanos);
+        endNanos = Math.max(endNanos, chunkEndNanos);
+        startTicks = Math.min(startTicks, chunkStartTicks);
+
         types.clear();
         typesByName.clear();
 
-        long chunkStart = filePosition + pos;
         readMeta(chunkStart + metaOffset);
         readConstantPool(chunkStart + cpOffset);
         cacheEventTypes();
 
         seek(chunkStart + CHUNK_HEADER_SIZE);
+        state = STATE_READING;
         return true;
     }
 
@@ -224,7 +307,8 @@ private void readMeta(long metaOffset) throws IOException {
         seek(metaOffset);
         ensureBytes(5);
 
-        ensureBytes(getVarint() - buf.position());
+        int posBeforeSize = buf.position();
+        ensureBytes(getVarint() - (buf.position() - posBeforeSize));
         getVarint();
         getVarlong();
         getVarlong();
@@ -277,7 +361,8 @@ private void readConstantPool(long cpOffset) throws IOException {
         seek(cpOffset);
         ensureBytes(5);
 
-        ensureBytes(getVarint() - buf.position());
+        int posBeforeSize = buf.position();
+        ensureBytes(getVarint() - (buf.position() - posBeforeSize));
         getVarint();
         getVarlong();
         getVarlong();
@@ -298,10 +383,13 @@ private void readConstants(JfrClass type) {
                 buf.position(buf.position() + (CHUNK_HEADER_SIZE + 3));
                 break;
             case "java.lang.Thread":
-                readThreads(type.field("group") != null);
+                readThreads(type.fields.size());
                 break;
             case "java.lang.Class":
-                readClasses(type.field("hidden") != null);
+                readClasses(type.fields.size());
+                break;
+            case "java.lang.String":
+                readStrings();
                 break;
             case "jdk.types.Symbol":
                 readSymbols();
@@ -312,31 +400,29 @@ private void readConstants(JfrClass type) {
             case "jdk.types.StackTrace":
                 readStackTraces();
                 break;
-            case "jdk.types.FrameType":
-                readMap(frameTypes);
-                break;
-            case "jdk.types.ThreadState":
-                readMap(threadStates);
-                break;
             default:
-                readOtherConstants(type.fields);
+                if (type.simpleType && type.fields.size() == 1) {
+                    readEnumValues(type.name);
+                } else {
+                    readOtherConstants(type.fields);
+                }
         }
     }
 
-    private void readThreads(boolean hasGroup) {
-        int count = getVarint(); //threads.preallocate(getVarint());
+    private void readThreads(int fieldCount) {
+        int count = getVarint(); // spark - don't preallocate
         for (int i = 0; i < count; i++) {
             long id = getVarlong();
             String osName = getString();
             int osThreadId = getVarint();
             String javaName = getString();
             long javaThreadId = getVarlong();
-            if (hasGroup) getVarlong();
+            readFields(fieldCount - 4);
             threads.put(id, javaName != null ? javaName : osName);
         }
     }
 
-    private void readClasses(boolean hasHidden) {
+    private void readClasses(int fieldCount) {
         int count = classes.preallocate(getVarint());
         for (int i = 0; i < count; i++) {
             long id = getVarlong();
@@ -344,7 +430,7 @@ private void readClasses(boolean hasHidden) {
             long name = getVarlong();
             long pkg = getVarlong();
             int modifiers = getVarint();
-            if (hasHidden) getVarint();
+            readFields(fieldCount - 4);
             classes.put(id, new ClassRef(name));
         }
     }
@@ -360,7 +446,7 @@ private void readMethods() {
             int hidden = getVarint();
             methods.put(id, new MethodRef(cls, name, sig));
         }
-        stackFrames.preallocate(count); // spark
+        stackFrames.preallocate(count); // spark - preallocate frames size
     }
 
     private void readStackTraces() {
@@ -388,6 +474,13 @@ private StackTrace readStackTrace() {
         return new StackTrace(methods, types, locations);
     }
 
+    private void readStrings() {
+        int count = strings.preallocate(getVarint());
+        for (int i = 0; i < count; i++) {
+            strings.put(getVarlong(), getString());
+        }
+    }
+
     private void readSymbols() {
         int count = symbols.preallocate(getVarint());
         for (int i = 0; i < count; i++) {
@@ -399,11 +492,13 @@ private void readSymbols() {
         }
     }
 
-    private void readMap(Map<Integer, String> map) {
+    private void readEnumValues(String typeName) {
+        HashMap<Integer, String> map = new HashMap<>();
         int count = getVarint();
         for (int i = 0; i < count; i++) {
-            map.put(getVarint(), getString());
+            map.put((int) getVarlong(), getString());
        }
+        enums.put(typeName, map);
     }
 
     private void readOtherConstants(List<JfrField> fields) {
@@ -432,16 +527,28 @@ private void readFields(boolean[] numeric) {
         }
     }
 
+    private void readFields(int count) {
+        while (count-- > 0) {
+            getVarlong();
+        }
+    }
+
     private void cacheEventTypes() {
         executionSample = getTypeId("jdk.ExecutionSample");
         nativeMethodSample = getTypeId("jdk.NativeMethodSample");
+        wallClockSample = getTypeId("profiler.WallClockSample");
         allocationInNewTLAB = getTypeId("jdk.ObjectAllocationInNewTLAB");
         allocationOutsideTLAB = getTypeId("jdk.ObjectAllocationOutsideTLAB");
         allocationSample = getTypeId("jdk.ObjectAllocationSample");
+        liveObject = getTypeId("profiler.LiveObject");
         monitorEnter = getTypeId("jdk.JavaMonitorEnter");
         threadPark = getTypeId("jdk.ThreadPark");
         activeSetting = getTypeId("jdk.ActiveSetting");
-        activeSettingHasStack = activeSetting >= 0 && typesByName.get("jdk.ActiveSetting").field("stackTrace") != null;
+
+        registerEvent("jdk.CPULoad", CPULoad.class);
+        registerEvent("jdk.GCHeapSummary", GCHeapSummary.class);
+        registerEvent("jdk.ObjectCount", ObjectCount.class);
+        registerEvent("jdk.ObjectCountAfterGC", ObjectCount.class);
     }
 
    private int getTypeId(String typeName) {
@@ -449,7 +556,23 @@ private int getTypeId(String typeName) {
         JfrClass type = typesByName.get(typeName);
         return type != null ? type.id : -1;
     }
 
-    private int getVarint() {
+    public int getEnumKey(String typeName, String value) {
+        Map<Integer, String> enumValues = enums.get(typeName);
+        if (enumValues != null) {
+            for (Map.Entry<Integer, String> entry : enumValues.entrySet()) {
+                if (value.equals(entry.getValue())) {
+                    return entry.getKey();
+                }
+            }
+        }
+        return -1;
+    }
+
+    public String getEnumValue(String typeName, int key) {
+        return enums.get(typeName).get(key);
+    }
+
+    public int getVarint() {
         int result = 0;
         for (int shift = 0; ; shift += 7) {
             byte b = buf.get();
@@ -460,7 +583,7 @@ private int getVarint() {
         }
     }
 
-    private long getVarlong() {
+    public long getVarlong() {
         long result = 0;
         for (int shift = 0; shift < 56; shift += 7) {
             byte b = buf.get();
@@ -472,12 +595,22 @@ private long getVarlong() {
         return result | (buf.get() & 0xffL) << 56;
     }
 
-    private String getString() {
+    public float getFloat() {
+        return buf.getFloat();
+    }
+
+    public double getDouble() {
+        return buf.getDouble();
+    }
+
+    public String getString() {
         switch (buf.get()) {
             case 0:
                 return null;
             case 1:
                 return "";
+            case 2:
+                return strings.get(getVarlong());
             case 3:
                 return new String(getBytes(), StandardCharsets.UTF_8);
             case 4: {
@@ -494,16 +627,21 @@ public String getString() {
         }
     }
 
-    private byte[] getBytes() {
+    public byte[] getBytes() {
         byte[] bytes = new byte[getVarint()];
         buf.get(bytes);
         return bytes;
     }
 
     private void seek(long pos) throws IOException {
-        filePosition = pos;
-        ch.position(pos);
-        buf.rewind().flip();
+        long bufPosition = pos - filePosition;
+        if (bufPosition >= 0 && bufPosition <= buf.limit()) {
+            buf.position((int) bufPosition);
+        } else {
+            filePosition = pos;
+            ch.position(pos);
+            buf.rewind().flip();
+        }
     }
 
     private boolean ensureBytes(int needed) throws IOException {
@@ -511,6 +649,10 @@ private boolean ensureBytes(int needed) throws IOException {
             return true;
         }
 
+        if (ch == null) {
+            return false;
+        }
+
         filePosition += buf.position();
 
         if (buf.capacity() < needed) {
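`getVarint()` and `getVarlong()`, made public above, decode JFR's LEB128-style variable-length integers: seven payload bits per byte, with the high bit acting as a continuation flag. A standalone sketch of the same decoding (mirrors the algorithm in the patched reader; the sample bytes encode 300):

```java
import java.nio.ByteBuffer;

public class VarintDecode {
    // 7 bits per byte, low-order group first; high bit set = another byte follows
    static int getVarint(ByteBuffer buf) {
        int result = 0;
        for (int shift = 0; ; shift += 7) {
            byte b = buf.get();
            result |= (b & 0x7f) << shift;
            if (b >= 0) { // high bit clear: this was the last byte
                return result;
            }
        }
    }

    public static void main(String[] args) {
        // 300 = 0b10_0101100 -> 0xAC (0x2C plus continuation bit), then 0x02
        ByteBuffer buf = ByteBuffer.wrap(new byte[]{(byte) 0xAC, 0x02});
        System.out.println(getVarint(buf)); // 300
    }
}
```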
@@ -544,11 +686,13 @@ void addChild(Element e) {
         }
     }
 
     static class JfrClass extends Element {
         final int id;
+        final boolean simpleType;
         final String name;
         final List<JfrField> fields;
 
         JfrClass(Map<String, String> attributes) {
             this.id = Integer.parseInt(attributes.get("id"));
+            this.simpleType = "true".equals(attributes.get("simpleType"));
             this.name = attributes.get("name");
             this.fields = new ArrayList<>(2);
         }
@@ -560,7 +704,7 @@ void addChild(Element e) {
             }
         }
 
-        JfrField field(String name) {
+        public JfrField field(String name) {
             for (JfrField field : fields) {
                 if (field.name.equals(name)) {
                     return field;
@@ -606,162 +750,196 @@ public StackTrace(long[] methods, byte[] types, int[] locations) {
         }
     }
 
-    public static abstract class Event implements Comparable<Event> {
-        public final long time;
-        public final int tid;
-        public final int stackTraceId;
+    public static class AllocationSample extends Event {
+        public final int classId;
+        public final long allocationSize;
+        public final long tlabSize;
 
-        protected Event(long time, int tid, int stackTraceId) {
-            this.time = time;
-            this.tid = tid;
-            this.stackTraceId = stackTraceId;
+        public AllocationSample(long time, int tid, int stackTraceId, int classId, long allocationSize, long tlabSize) {
+            super(time, tid, stackTraceId);
+            this.classId = classId;
+            this.allocationSize = allocationSize;
+            this.tlabSize = tlabSize;
         }
 
         @Override
-        public int compareTo(Event o) {
-            return Long.compare(time, o.time);
+        public int hashCode() {
+            return classId * 127 + stackTraceId + (tlabSize == 0 ? 17 : 0);
         }
 
         @Override
-        public int hashCode() {
-            return stackTraceId;
+        public boolean sameGroup(Event o) {
+            if (o instanceof AllocationSample) {
+                AllocationSample a = (AllocationSample) o;
+                return classId == a.classId && (tlabSize == 0) == (a.tlabSize == 0);
+            }
+            return false;
         }
 
-        public boolean sameGroup(Event o) {
-            return getClass() == o.getClass();
+        @Override
+        public long classId() {
+            return classId;
        }
 
+        @Override
         public long value() {
-            return 1;
+            return tlabSize != 0 ? tlabSize : allocationSize;
         }
     }
 
-    public static class EventAggregator {
-        private static final int INITIAL_CAPACITY = 1024;
-
-        private final boolean threads;
-        private final boolean total;
-        private Event[] keys;
-        private long[] values;
-        private int size;
+    static class CPULoad extends Event {
+        public final float jvmUser;
+        public final float jvmSystem;
+        public final float machineTotal;
 
-        public EventAggregator(boolean threads, boolean total) {
-            this.threads = threads;
-            this.total = total;
-            this.keys = new Event[INITIAL_CAPACITY];
-            this.values = new long[INITIAL_CAPACITY];
+        public CPULoad(JfrReader jfr) {
+            super(jfr.getVarlong(), 0, 0);
+            this.jvmUser = jfr.getFloat();
+            this.jvmSystem = jfr.getFloat();
+            this.machineTotal = jfr.getFloat();
         }
+    }
 
-        public void collect(Event e) {
-            int mask = keys.length - 1;
-            int i = hashCode(e) & mask;
-            while (keys[i] != null) {
-                if (sameGroup(keys[i], e)) {
-                    values[i] += total ? e.value() : 1;
-                    return;
-                }
-                i = (i + 1) & mask;
-            }
+    static class ContendedLock extends Event {
+        public final long duration;
+        public final int classId;
 
-            keys[i] = e;
-            values[i] = total ? e.value() : 1;
+        public ContendedLock(long time, int tid, int stackTraceId, long duration, int classId) {
+            super(time, tid, stackTraceId);
+            this.duration = duration;
+            this.classId = classId;
+        }
 
-            if (++size * 2 > keys.length) {
-                resize(keys.length * 2);
-            }
+        @Override
+        public int hashCode() {
+            return classId * 127 + stackTraceId;
         }
 
-        public long getValue(Event e) {
-            int mask = keys.length - 1;
-            int i = hashCode(e) & mask;
-            while (keys[i] != null && !sameGroup(keys[i], e)) {
-                i = (i + 1) & mask;
+        @Override
+        public boolean sameGroup(Event o) {
+            if (o instanceof ContendedLock) {
+                ContendedLock c = (ContendedLock) o;
+                return classId == c.classId;
             }
-            return values[i];
+            return false;
         }
 
-        public void forEach(Visitor visitor) {
-            for (int i = 0; i < keys.length; i++) {
-                if (keys[i] != null) {
-                    visitor.visit(keys[i], values[i]);
-                }
-            }
+        @Override
+        public long classId() {
+            return classId;
+        }
+
+        @Override
+        public long value() {
+            return duration;
         }
+    }
+
+    public static abstract class Event implements Comparable<Event> {
+        public final long time;
+        public final int tid;
+        public final int stackTraceId;
 
-        private int hashCode(Event e) {
-            return e.hashCode() + (threads ? e.tid * 31 : 0);
+        protected Event(long time, int tid, int stackTraceId) {
+            this.time = time;
+            this.tid = tid;
+            this.stackTraceId = stackTraceId;
        }
 
-        private boolean sameGroup(Event e1, Event e2) {
-            return e1.stackTraceId == e2.stackTraceId && (!threads || e1.tid == e2.tid) && e1.sameGroup(e2);
+        @Override
+        public int compareTo(Event o) {
+            return Long.compare(time, o.time);
         }
 
-        private void resize(int newCapacity) {
-            Event[] newKeys = new Event[newCapacity];
-            long[] newValues = new long[newCapacity];
-            int mask = newKeys.length - 1;
+        @Override
+        public int hashCode() {
+            return stackTraceId;
+        }
 
-            for (int i = 0; i < keys.length; i++) {
-                if (keys[i] != null) {
-                    for (int j = hashCode(keys[i]) & mask; ; j = (j + 1) & mask) {
-                        if (newKeys[j] == null) {
-                            newKeys[j] = keys[i];
-                            newValues[j] = values[i];
-                            break;
-                        }
-                    }
+        @Override
+        public String toString() {
+            StringBuilder sb = new StringBuilder(getClass().getSimpleName())
+                    .append("{time=").append(time)
+                    .append(",tid=").append(tid)
+                    .append(",stackTraceId=").append(stackTraceId);
+            for (Field f : getClass().getDeclaredFields()) {
+                try {
+                    sb.append(',').append(f.getName()).append('=').append(f.get(this));
+                } catch (ReflectiveOperationException e) {
+                    break;
                 }
             }
+            return sb.append('}').toString();
+        }
+
+        public boolean sameGroup(Event o) {
+            return getClass() == o.getClass();
+        }
+
+        public long classId() {
+            return 0;
+        }
 
-            keys = newKeys;
-            values = newValues;
+        public long samples() {
+            return 1;
         }
 
-        public interface Visitor {
-            void visit(Event event, long value);
+        public long value() {
+            return 1;
         }
     }
 
-    public static class AllocationSample extends Event {
-        public final int classId;
-        public final long allocationSize;
-        public final long tlabSize;
+    public static class ExecutionSample extends Event {
+        public final int threadState;
+        public final int samples;
 
-        public AllocationSample(long time, int tid, int stackTraceId, int classId, long allocationSize, long tlabSize) {
+        public ExecutionSample(long time, int tid, int stackTraceId, int threadState, int samples) {
             super(time, tid, stackTraceId);
-            this.classId = classId;
-            this.allocationSize = allocationSize;
-            this.tlabSize = tlabSize;
+            this.threadState = threadState;
+            this.samples = samples;
         }
 
         @Override
-        public int hashCode() {
-            return classId * 127 + stackTraceId + (tlabSize == 0 ? 17 : 0);
+        public long samples() {
+            return samples;
        }
 
         @Override
-        public boolean sameGroup(Event o) {
-            if (o instanceof AllocationSample) {
-                AllocationSample a = (AllocationSample) o;
-                return classId == a.classId && (tlabSize == 0) == (a.tlabSize == 0);
-            }
-            return false;
+        public long value() {
+            return samples;
         }
+    }
 
-        @Override
-        public long value() {
-            return tlabSize != 0 ? tlabSize : allocationSize;
+    static class GCHeapSummary extends Event {
+        public final int gcId;
+        public final boolean afterGC;
+        public final long committed;
+        public final long reserved;
+        public final long used;
+
+        public GCHeapSummary(JfrReader jfr) {
+            super(jfr.getVarlong(), 0, 0);
+            this.gcId = jfr.getVarint();
+            this.afterGC = jfr.getVarint() > 0;
+            long start = jfr.getVarlong();
+            long committedEnd = jfr.getVarlong();
+            this.committed = jfr.getVarlong();
+            long reservedEnd = jfr.getVarlong();
+            this.reserved = jfr.getVarlong();
+            this.used = jfr.getVarlong();
         }
     }
 
-    public static class ContendedLock extends Event {
-        public final long duration;
+    static class LiveObject extends Event {
         public final int classId;
+        public final long allocationSize;
+        public final long allocationTime;
 
-        public ContendedLock(long time, int tid, int stackTraceId, long duration, int classId) {
+        public LiveObject(long time, int tid, int stackTraceId, int classId, long allocationSize, long allocationTime) {
             super(time, tid, stackTraceId);
-            this.duration = duration;
             this.classId = classId;
+            this.allocationSize = allocationSize;
+            this.allocationTime = allocationTime;
         }
 
         @Override
@@ -771,26 +949,36 @@ public int hashCode() {
 
         @Override
         public boolean sameGroup(Event o) {
-            if (o instanceof ContendedLock) {
-                ContendedLock c = (ContendedLock) o;
-                return classId == c.classId;
+            if (o instanceof LiveObject) {
+                LiveObject a = (LiveObject) o;
+                return classId == a.classId;
             }
             return false;
         }
 
+        @Override
+        public long classId() {
+            return classId;
+        }
+
         @Override
         public long value() {
-            return duration;
+            return allocationSize;
         }
     }
 
-    public static class ExecutionSample extends Event {
-        public final int threadState;
+    static class ObjectCount extends Event {
+        public final int gcId;
+        public final int classId;
+        public final long count;
+        public final long totalSize;
 
-        public ExecutionSample(long time, int tid, int stackTraceId, int threadState) {
-            super(time, tid, stackTraceId);
-            this.threadState = threadState;
+        public ObjectCount(JfrReader jfr) {
+            super(jfr.getVarlong(), 0, 0);
+            this.gcId = jfr.getVarint();
+            this.classId = jfr.getVarint();
+            this.count = jfr.getVarlong();
+            this.totalSize = jfr.getVarlong();
         }
     }
-
 }
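Tying the JfrReader changes together: `hasMoreChunks()`/`readEvent()` drive the parse loop, and `registerEvent()` maps additional JFR event types onto classes exposing a public `(JfrReader)` constructor, the same mechanism `cacheEventTypes()` uses for `CPULoad` and `GCHeapSummary`. A rough usage sketch against the patched reader (the `jdk.ThreadCPULoad` mapping and its field layout are illustrative assumptions):

```java
import java.io.IOException;
import java.nio.file.Paths;

public class JfrDump {
    // assumed event layout: time, thread id, then two floats (user/system load)
    public static class ThreadCpuLoad extends JfrReader.Event {
        public final float user;
        public final float system;

        public ThreadCpuLoad(JfrReader jfr) {
            super(jfr.getVarlong(), jfr.getVarint(), 0); // no stack trace for this event
            this.user = jfr.getFloat();
            this.system = jfr.getFloat();
        }
    }

    public static void main(String[] args) throws IOException {
        try (JfrReader jfr = new JfrReader(Paths.get(args[0]))) {
            jfr.registerEvent("jdk.ThreadCPULoad", ThreadCpuLoad.class);
            while (jfr.hasMoreChunks()) {
                ThreadCpuLoad event;
                while ((event = jfr.readEvent(ThreadCpuLoad.class)) != null) {
                    System.out.println(event); // Event.toString() dumps fields reflectively
                }
            }
        }
    }
}
```

Note that `registerEvent()` resolves the event name against chunk metadata that has already been parsed, so it is only effective once the reader has been constructed (which reads the first chunk header).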
diff --git a/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so
index 800cf919..e02c7be7 100755
Binary files a/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so and b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so differ
diff --git a/spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so
deleted file mode 100755
index 3c81d1cb..00000000
Binary files a/spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so and /dev/null differ
diff --git a/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so
index 5af50718..a88f43af 100755
Binary files a/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so and b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so differ
diff --git a/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so
index 4930c678..56334ccd 100755
Binary files a/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so and b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so differ
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index a9cef99e..39d3ee75 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -84,11 +84,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 
     dependencies {
         exclude(dependency('org.ow2.asm::'))
diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle
index 948090f4..88fee079 100644
--- a/spark-forge/build.gradle
+++ b/spark-forge/build.gradle
@@ -52,11 +52,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 }
 
 artifacts {
diff --git a/spark-neoforge/build.gradle b/spark-neoforge/build.gradle
index e0018eda..6404a502 100644
--- a/spark-neoforge/build.gradle
+++ b/spark-neoforge/build.gradle
@@ -71,11 +71,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 
     mergeServiceFiles()
 }
diff --git a/spark-paper/build.gradle b/spark-paper/build.gradle
index dde868cb..1d3aa910 100644
--- a/spark-paper/build.gradle
+++ b/spark-paper/build.gradle
@@ -48,13 +48,7 @@ shadowJar {
     relocate 'me.lucko.spark.proto', 'me.lucko.spark.paper.proto'
     relocate 'spark-native', 'spark-paper-native'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 }
 
 artifacts {
diff --git a/spark-sponge/build.gradle b/spark-sponge/build.gradle
index efb3321d..de2a1d37 100644
--- a/spark-sponge/build.gradle
+++ b/spark-sponge/build.gradle
@@ -36,11 +36,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 }
 
 artifacts {
diff --git a/spark-velocity/build.gradle b/spark-velocity/build.gradle
index f0d2a350..ff1652ab 100644
--- a/spark-velocity/build.gradle
+++ b/spark-velocity/build.gradle
@@ -34,11 +34,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 }
 
 artifacts {