Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Standalone profiling agent #480

Merged
merged 1 commit into from
Jan 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions settings.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -31,5 +31,6 @@ include (
'spark-neoforge',
'spark-paper',
'spark-sponge',
'spark-standalone-agent',
'spark-velocity',
)
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,7 @@ public CompletableFuture<Void> executeCommand(CommandSender sender, String[] arg
try {
executeCommand0(sender, args);
future.complete(null);
} catch (Exception e) {
} catch (Throwable e) {
this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command", e);
future.completeExceptionally(e);
} finally {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,8 @@ default Data toData() {
enum Type {
SERVER(PlatformMetadata.Type.SERVER),
CLIENT(PlatformMetadata.Type.CLIENT),
PROXY(PlatformMetadata.Type.PROXY);
PROXY(PlatformMetadata.Type.PROXY),
APPLICATION(PlatformMetadata.Type.APPLICATION);

private final PlatformMetadata.Type type;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,7 @@ public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStat
List<CommandSender> senders = this.platform.getPlugin().getCommandSenders().collect(Collectors.toList());

PlatformInfo.Type platformType = this.platform.getPlugin().getPlatformInfo().getType();
if (platformType != PlatformInfo.Type.CLIENT) {
if (platformType == PlatformInfo.Type.SERVER || platformType == PlatformInfo.Type.PROXY) {
long playerCount = senders.size() - 1; // includes console
builder.setPlayerCount(playerCount);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,10 @@ private static Instrumentation loadInstrumentation(SparkPlugin plugin) {
private final Map<String, Class<?>> classes = new HashMap<>();

public InstrumentationClassFinder(SparkPlugin plugin) {
Instrumentation instrumentation = loadInstrumentation(plugin);
this(loadInstrumentation(plugin));
}

public InstrumentationClassFinder(Instrumentation instrumentation) {
if (instrumentation == null) {
return;
}
Expand Down
1 change: 1 addition & 0 deletions spark-common/src/main/proto/spark/spark.proto
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ message PlatformMetadata {
SERVER = 0;
CLIENT = 1;
PROXY = 2;
APPLICATION = 3;
}
}

Expand Down
62 changes: 62 additions & 0 deletions spark-standalone-agent/build.gradle
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
plugins {
// blossom: token replacement at compile time (used to bake the version string in)
id 'net.kyori.blossom' version '1.3.0'
// shadow: produces the fat/relocated jar that is shipped as the agent
id 'com.gradleup.shadow' version '8.3.0'
}

dependencies {
implementation project(':spark-common')
// ANSI serializer renders adventure Components for plain terminal output;
// the core adventure API already comes in transitively via spark-common
implementation('net.kyori:adventure-text-serializer-ansi:4.17.0') {
exclude(module: 'adventure-bom')
exclude(module: 'adventure-api')
exclude(module: 'annotations')
}
implementation 'org.slf4j:slf4j-simple:2.0.16'
implementation 'com.google.code.gson:gson:2.9.0'
implementation 'com.google.guava:guava:31.1-jre'

// interactive remote shell for the standalone agent (SSH server + terminal handling)
implementation 'org.jline:jline-remote-ssh:3.28.0'
implementation 'org.apache.sshd:sshd-core:2.14.0'
}

tasks.withType(JavaCompile).configureEach {
// jdk.attach is needed for com.sun.tools.attach (VirtualMachine attach API)
options.compilerArgs += ['--add-modules', 'jdk.attach']
options.release = 11
}

blossom {
// substitute the real version into StandaloneSparkPlugin at build time
replaceTokenIn('src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java')
replaceToken '@version@', project.pluginVersion
}

jar {
manifest {
// the same class serves as CLI entry point (Main-Class), dynamic-attach
// agent (Agent-Class) and -javaagent entry point (Premain-Class)
attributes(
'Main-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent',
'Agent-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent',
'Premain-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent'
)
}
}

shadowJar {
archiveFileName = "spark-${project.pluginVersion}-standalone-agent.jar"

// relocate bundled libraries so the agent cannot clash with classes
// already loaded inside the target application's JVM
relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
relocate 'com.google.gson', 'me.lucko.spark.lib.gson'
relocate 'com.google.common', 'me.lucko.spark.lib.guava'

project.applyExcludes(delegate)
}

artifacts {
archives shadowJar
shadow shadowJar
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <[email protected]>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.standalone;

import me.lucko.spark.common.command.sender.AbstractCommandSender;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.serializer.ansi.ANSIComponentSerializer;

import java.util.UUID;

/**
 * A command sender for the standalone agent. There is no real "player" or
 * "console" concept here, so output is simply delegated to a string consumer.
 */
public class StandaloneCommandSender extends AbstractCommandSender<StandaloneCommandSender.Output> {

    /** A sender that silently discards all output. */
    public static final StandaloneCommandSender NO_OP = new StandaloneCommandSender(msg -> {});

    /** A sender that prints output to {@code System.out}. */
    public static final StandaloneCommandSender SYSTEM_OUT = new StandaloneCommandSender(System.out::println);

    public StandaloneCommandSender(Output output) {
        super(output);
    }

    @Override
    public String getName() {
        return "Standalone";
    }

    @Override
    public UUID getUniqueId() {
        // standalone senders have no identity
        return null;
    }

    @Override
    public void sendMessage(Component message) {
        // render the component to an ANSI escape-coded string for terminal display
        String rendered = ANSIComponentSerializer.ansi().serialize(message);
        this.delegate.sendMessage(rendered);
    }

    @Override
    public boolean hasPermission(String permission) {
        // no permission system in the standalone agent - everything is allowed
        return true;
    }

    /** Receives rendered (ANSI) message strings. */
    public interface Output {
        void sendMessage(String message);
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <[email protected]>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.standalone;

import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import me.lucko.spark.common.platform.PlatformInfo;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;

/**
 * {@link PlatformInfo} for the standalone agent.
 *
 * <p>Reports the platform as a generic {@code APPLICATION}, but attempts to
 * detect whether the host JVM is actually a vanilla Minecraft server (launched
 * via the official bundler) and, if so, report its Minecraft version.</p>
 */
public class StandalonePlatformInfo implements PlatformInfo {
    // the spark version string
    private final String version;
    // detected vanilla Minecraft version, or null if the host is not vanilla MC
    private final String minecraftVersion;

    public StandalonePlatformInfo(String version) {
        this.version = version;
        this.minecraftVersion = detectVanillaMinecraftVersion();
    }

    @Override
    public Type getType() {
        return Type.APPLICATION;
    }

    @Override
    public String getName() {
        return "Standalone";
    }

    @Override
    public String getBrand() {
        return this.minecraftVersion != null ? "Vanilla Minecraft" : "Unknown";
    }

    @Override
    public String getVersion() {
        return this.version;
    }

    @Override
    public String getMinecraftVersion() {
        return this.minecraftVersion;
    }

    /**
     * Attempts to detect the vanilla Minecraft version of the host JVM by
     * reading the {@code version.json} resource next to the official bundler
     * main class.
     *
     * @return the version name, or {@code null} if it could not be detected
     */
    private static String detectVanillaMinecraftVersion() {
        try {
            Class<?> clazz = Class.forName("net.minecraft.bundler.Main");
            URL resource = clazz.getClassLoader().getResource("version.json");
            if (resource != null) {
                try (InputStream stream = resource.openStream(); InputStreamReader reader = new InputStreamReader(stream)) {
                    JsonObject obj = new Gson().fromJson(reader, JsonObject.class);
                    // fromJson returns null for empty input, and get() returns null
                    // when the key is absent - guard both instead of relying on the
                    // broad catch below to swallow an NPE
                    JsonElement name = obj == null ? null : obj.get("name");
                    if (name != null && name.isJsonPrimitive() && name.getAsJsonPrimitive().isString()) {
                        return name.getAsString();
                    }
                }
            }
        } catch (Exception e) {
            // ignored - host is not the vanilla bundler, or version.json is unreadable
        }
        return null;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <[email protected]>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.standalone;

import com.sun.tools.attach.VirtualMachine;
import com.sun.tools.attach.VirtualMachineDescriptor;

import java.lang.instrument.Instrumentation;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Entry points for the spark standalone agent.
 *
 * <p>The jar can be used three ways: run directly ({@link #main}) to attach
 * spark to another JVM by pid, loaded at startup via {@code -javaagent}
 * ({@link #premain}), or injected dynamically via the attach API
 * ({@link #agentmain}).</p>
 */
public class StandaloneSparkAgent {

    // Entry point when the agent is run as a normal jar
    public static void main(String[] args) {
        if (args.length == 0) {
            System.err.println("Usage: java -jar spark-standalone-agent.jar <pid> [args...]");

            // help the user out: list attachable JVMs so they can pick a pid
            List<VirtualMachineDescriptor> vms = VirtualMachine.list();
            if (vms.isEmpty()) {
                return;
            }

            System.out.println("Current JVM processes:");
            for (VirtualMachineDescriptor vm : vms) {
                System.out.println(" pid=" + vm.id() + " (" + vm.displayName() + ")");
            }

            return;
        }

        try {
            VirtualMachine vm = VirtualMachine.attach(args[0]);
            // resolve our own jar path via toURI()/Paths: URL#getPath() keeps
            // percent-encoding (e.g. spaces as %20), which breaks loadAgent
            String agentPath = Paths.get(StandaloneSparkAgent.class.getProtectionDomain().getCodeSource().getLocation().toURI()).toString();
            String arguments = String.join(",", Arrays.copyOfRange(args, 1, args.length));
            vm.loadAgent(agentPath, arguments);
            System.out.println("[spark] Agent loaded successfully.");
            vm.detach();
        } catch (Throwable e) {
            System.err.println("Failed to attach agent to process " + args[0]);
            e.printStackTrace(System.err);
        }
    }

    // Entry point when the agent is loaded via -javaagent
    public static void premain(String agentArgs, Instrumentation instrumentation) {
        System.out.println("[spark] Loading standalone agent... (premain)");
        init(agentArgs, instrumentation);
    }

    // Entry point when the agent is loaded via VirtualMachine#loadAgent
    public static void agentmain(String agentArgs, Instrumentation instrumentation) {
        System.out.println("[spark] Loading standalone agent... (agentmain)");
        init(agentArgs, instrumentation);
    }

    /**
     * Common initialisation: parses the agent argument string and boots the
     * standalone spark plugin. Never throws - failures are logged to stderr
     * so the host application is not disturbed.
     */
    private static void init(String agentArgs, Instrumentation instrumentation) {
        try {
            new StandaloneSparkPlugin(instrumentation, parseArguments(agentArgs));
        } catch (Throwable e) {
            System.err.println("[spark] Loading failed :(");
            e.printStackTrace(System.err);
        }
    }

    /**
     * Parses a comma-separated agent argument string into a map.
     * {@code key=value} entries map key to value; bare flags map to "true".
     */
    private static Map<String, String> parseArguments(String agentArgs) {
        Map<String, String> arguments = new HashMap<>();
        if (agentArgs == null) {
            agentArgs = "";
        }
        for (String arg : agentArgs.split(",")) {
            if (arg.contains("=")) {
                String[] parts = arg.split("=", 2);
                arguments.put(parts[0], parts[1]);
            } else {
                arguments.put(arg, "true");
            }
        }
        return arguments;
    }

}
Loading
Loading