From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Jason Penilla <11360596+jpenilla@users.noreply.github.com>
Date: Sat, 29 Oct 2022 15:22:32 -0700
Subject: [PATCH] Plugin remapping

Co-authored-by: Nassim Jahnke <nassim@njahnke.dev>
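
Remaps plugin jars compiled against Spigot/CraftBukkit mappings to Mojang
mappings at load time so they can run on the Mojang-mapped server. Remapped
jars are cached under plugins/.paper-remapped and are invalidated when the
mappings hash changes. Jars that already declare a Mojang namespace via the
paperweight-mappings-namespace manifest attribute are skipped.

For illustration only (not part of this patch), a jar is treated as already
Mojang-mapped when its META-INF/MANIFEST.MF contains roughly:

    Manifest-Version: 1.0
    paperweight-mappings-namespace: mojang+yarn

Plugin remapping can be disabled with the paper.disable-plugin-rewriting
system property, e.g. java -Dpaper.disable-plugin-rewriting=true.
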
diff --git a/build.gradle.kts b/build.gradle.kts
index 4e24a9dcad6c8b3f52c3039f2ba80dfedb911e03..ddd9bf846c7e918a1a36a6035b220db201c42440 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -46,9 +46,14 @@ dependencies {
testImplementation("org.mockito:mockito-core:5.11.0")
testImplementation("org.ow2.asm:asm-tree:9.7")
testImplementation("org.junit-pioneer:junit-pioneer:2.2.0") // Paper - CartesianTest
+ implementation("net.neoforged:AutoRenamingTool:2.0.3") // Paper - remap plugins
+ implementation("net.neoforged:srgutils:1.0.9") // Paper - remap plugins - bump transitive of ART
+}
+
+paperweight {
+ craftBukkitPackageVersion.set("v1_20_R4") // also needs to be updated in MappingEnvironment
}
-val craftbukkitPackageVersion = "1_20_R4" // Paper
tasks.jar {
archiveClassifier.set("dev")
@@ -68,7 +73,7 @@ tasks.jar {
"Specification-Vendor" to "Bukkit Team",
"Git-Branch" to gitBranch, // Paper
"Git-Commit" to gitHash, // Paper
- "CraftBukkit-Package-Version" to craftbukkitPackageVersion, // Paper
+ "CraftBukkit-Package-Version" to paperweight.craftBukkitPackageVersion.get(), // Paper
)
for (tld in setOf("net", "com", "org")) {
attributes("$tld/bukkit", "Sealed" to true)
@@ -148,20 +153,41 @@ val runtimeClasspathWithoutVanillaServer = configurations.runtimeClasspath.flatM
runtime.filterNot { it.asFile.absolutePath == vanilla }
}
-tasks.registerRunTask("runShadow") {
- description = "Spin up a test server from the shadowJar archiveFile"
- classpath(tasks.shadowJar.flatMap { it.archiveFile })
+tasks.registerRunTask("runServer") {
+ description = "Spin up a test server from the Mojang mapped server jar"
+ classpath(tasks.includeMappings.flatMap { it.outputJar })
classpath(runtimeClasspathWithoutVanillaServer)
}
-tasks.registerRunTask("runReobf") {
+tasks.registerRunTask("runReobfServer") {
description = "Spin up a test server from the reobfJar output jar"
classpath(tasks.reobfJar.flatMap { it.outputJar })
classpath(runtimeClasspathWithoutVanillaServer)
}
-tasks.registerRunTask("runDev") {
- description = "Spin up a non-relocated Mojang-mapped test server"
+tasks.registerRunTask("runDevServer") {
+ description = "Spin up a test server without assembling a jar"
classpath(sourceSets.main.map { it.runtimeClasspath })
jvmArgs("-DPaper.pushPaperAssetsRoot=true")
}
+
+tasks.registerRunTask("runBundler") {
+ description = "Spin up a test server from the Mojang mapped bundler jar"
+ classpath(rootProject.tasks.named<io.papermc.paperweight.tasks.CreateBundlerJar>("createMojmapBundlerJar").flatMap { it.outputZip })
+ mainClass.set(null as String?)
+}
+tasks.registerRunTask("runReobfBundler") {
+ description = "Spin up a test server from the reobf bundler jar"
+ classpath(rootProject.tasks.named<io.papermc.paperweight.tasks.CreateBundlerJar>("createReobfBundlerJar").flatMap { it.outputZip })
+ mainClass.set(null as String?)
+}
+tasks.registerRunTask("runPaperclip") {
+ description = "Spin up a test server from the Mojang mapped Paperclip jar"
+ classpath(rootProject.tasks.named<io.papermc.paperweight.tasks.CreatePaperclipJar>("createMojmapPaperclipJar").flatMap { it.outputZip })
+ mainClass.set(null as String?)
+}
+tasks.registerRunTask("runReobfPaperclip") {
+ description = "Spin up a test server from the reobf Paperclip jar"
+ classpath(rootProject.tasks.named<io.papermc.paperweight.tasks.CreatePaperclipJar>("createReobfPaperclipJar").flatMap { it.outputZip })
+ mainClass.set(null as String?)
+}
diff --git a/src/main/java/io/papermc/paper/plugin/PluginInitializerManager.java b/src/main/java/io/papermc/paper/plugin/PluginInitializerManager.java
index 708e5bb9bbf0476fcc2c4b92c6830b094703b43e..bb1cfa8ea8b11fc36ea72c8e382b8554bccd0ce5 100644
--- a/src/main/java/io/papermc/paper/plugin/PluginInitializerManager.java
+++ b/src/main/java/io/papermc/paper/plugin/PluginInitializerManager.java
@@ -6,10 +6,10 @@ import io.papermc.paper.plugin.entrypoint.Entrypoint;
import io.papermc.paper.plugin.entrypoint.LaunchEntryPointHandler;
import io.papermc.paper.plugin.provider.PluginProvider;
import io.papermc.paper.plugin.provider.type.paper.PaperPluginParent;
+import io.papermc.paper.pluginremap.PluginRemapper;
import joptsimple.OptionSet;
import net.minecraft.server.dedicated.DedicatedServer;
import org.bukkit.configuration.file.YamlConfiguration;
-import org.bukkit.craftbukkit.CraftServer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
@@ -25,10 +25,14 @@ public class PluginInitializerManager {
private static PluginInitializerManager impl;
private final Path pluginDirectory;
private final Path updateDirectory;
+ public final io.papermc.paper.pluginremap.@org.checkerframework.checker.nullness.qual.MonotonicNonNull PluginRemapper pluginRemapper; // Paper
PluginInitializerManager(final Path pluginDirectory, final Path updateDirectory) {
this.pluginDirectory = pluginDirectory;
this.updateDirectory = updateDirectory;
+ this.pluginRemapper = Boolean.getBoolean("paper.disable-plugin-rewriting")
+ ? null
+ : PluginRemapper.create(pluginDirectory);
}
private static PluginInitializerManager parse(@NotNull final OptionSet minecraftOptionSet) throws Exception {
@@ -96,6 +100,7 @@ public class PluginInitializerManager {
public static void load(OptionSet optionSet) throws Exception {
// We have to load the bukkit configuration inorder to get the update folder location.
io.papermc.paper.plugin.PluginInitializerManager pluginSystem = io.papermc.paper.plugin.PluginInitializerManager.init(optionSet);
+ if (pluginSystem.pluginRemapper != null) pluginSystem.pluginRemapper.loadingPlugins();
// Register the default plugin directory
io.papermc.paper.plugin.util.EntrypointUtil.registerProvidersFromSource(io.papermc.paper.plugin.provider.source.DirectoryProviderSource.INSTANCE, pluginSystem.pluginDirectoryPath());
diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/DirectoryProviderSource.java b/src/main/java/io/papermc/paper/plugin/provider/source/DirectoryProviderSource.java
index 226f457db6c1461c943c157b2b91e7450abc9dc6..0846d3a904e470ae1920c5c8be3df9c5dfc3de27 100644
--- a/src/main/java/io/papermc/paper/plugin/provider/source/DirectoryProviderSource.java
+++ b/src/main/java/io/papermc/paper/plugin/provider/source/DirectoryProviderSource.java
@@ -17,7 +17,7 @@ import org.slf4j.Logger;
public class DirectoryProviderSource implements ProviderSource<Path, List<Path>> {
public static final DirectoryProviderSource INSTANCE = new DirectoryProviderSource();
- private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("Directory '%s'"::formatted);
+ private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("Directory '%s'"::formatted, false); // Paper - Remap plugins
private static final Logger LOGGER = LogUtils.getClassLogger();
@Override
@@ -37,6 +37,11 @@ public class DirectoryProviderSource implements ProviderSource<Path, List<Path>>
LOGGER.error("Error preparing plugin context: " + e.getMessage(), e);
}
});
+ // Paper start - Remap plugins
+ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) {
+ return io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewritePluginDirectory(files);
+ }
+ // Paper end - Remap plugins
return files;
}
diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java b/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java
index 5b58df8df7efca0f67e3a14dd71051dfd7a26079..6b8ed8a0baaf4a57d20e57cec3400af5561ddd79 100644
--- a/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java
+++ b/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java
@@ -24,9 +24,15 @@ import java.util.jar.JarFile;
public class FileProviderSource implements ProviderSource<Path, Path> {
private final Function<Path, String> contextChecker;
+ private final boolean applyRemap;
- public FileProviderSource(Function<Path, String> contextChecker) {
+ public FileProviderSource(Function<Path, String> contextChecker, boolean applyRemap) {
this.contextChecker = contextChecker;
+ this.applyRemap = applyRemap;
+ }
+
+ public FileProviderSource(Function<Path, String> contextChecker) {
+ this(contextChecker, true);
}
@Override
@@ -50,6 +56,11 @@ public class FileProviderSource implements ProviderSource<Path, Path> {
} catch (Exception exception) {
throw new RuntimeException(source + " failed to update!", exception);
}
+ // Paper start - Remap plugins
+ if (this.applyRemap && io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) {
+ context = io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewritePlugin(context);
+ }
+ // Paper end - Remap plugins
return context;
}
diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/PluginFlagProviderSource.java b/src/main/java/io/papermc/paper/plugin/provider/source/PluginFlagProviderSource.java
index ac55ae0e30119556f01e2e36c20fc63a111fae5f..c2b60c74513544e5d96110c7c3ff80e8f1b686d1 100644
--- a/src/main/java/io/papermc/paper/plugin/provider/source/PluginFlagProviderSource.java
+++ b/src/main/java/io/papermc/paper/plugin/provider/source/PluginFlagProviderSource.java
@@ -14,7 +14,7 @@ import java.util.List;
public class PluginFlagProviderSource implements ProviderSource<List<Path>, List<Path>> {
public static final PluginFlagProviderSource INSTANCE = new PluginFlagProviderSource();
- private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("File '%s' specified through 'add-plugin' argument"::formatted);
+ private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("File '%s' specified through 'add-plugin' argument"::formatted, false);
private static final Logger LOGGER = LogUtils.getClassLogger();
@Override
@@ -27,6 +27,11 @@ public class PluginFlagProviderSource implements ProviderSource<List<Path>, List
LOGGER.error("Error preparing plugin context: " + e.getMessage(), e);
}
}
+ // Paper start - Remap plugins
+ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null && !files.isEmpty()) {
+ return io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.rewriteExtraPlugins(files);
+ }
+ // Paper end - Remap plugins
return files;
}
diff --git a/src/main/java/io/papermc/paper/plugin/provider/type/PluginFileType.java b/src/main/java/io/papermc/paper/plugin/provider/type/PluginFileType.java
index 87128685015d550440a798028f50be24bc755f6c..8d0da6e46d4eb5eb05c3144510c4ef083559d0ec 100644
--- a/src/main/java/io/papermc/paper/plugin/provider/type/PluginFileType.java
+++ b/src/main/java/io/papermc/paper/plugin/provider/type/PluginFileType.java
@@ -22,9 +22,10 @@ import java.util.jar.JarFile;
*/
public abstract class PluginFileType<T, C extends PluginMeta> {
+ public static final String PAPER_PLUGIN_YML = "paper-plugin.yml";
private static final List<String> CONFIG_TYPES = new ArrayList<>();
- public static final PluginFileType<PaperPluginParent, PaperPluginMeta> PAPER = new PluginFileType<>("paper-plugin.yml", PaperPluginParent.FACTORY) {
+ public static final PluginFileType<PaperPluginParent, PaperPluginMeta> PAPER = new PluginFileType<>(PAPER_PLUGIN_YML, PaperPluginParent.FACTORY) {
@Override
protected void register(EntrypointHandler entrypointHandler, PaperPluginParent parent) {
PaperPluginParent.PaperBootstrapProvider bootstrapPluginProvider = null;
diff --git a/src/main/java/io/papermc/paper/pluginremap/DebugLogger.java b/src/main/java/io/papermc/paper/pluginremap/DebugLogger.java
new file mode 100644
index 0000000000000000000000000000000000000000..37024fe35f686490bba0df2e71d2198882e09db2
--- /dev/null
+++ b/src/main/java/io/papermc/paper/pluginremap/DebugLogger.java
@@ -0,0 +1,65 @@
+package io.papermc.paper.pluginremap;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.function.Consumer;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.checkerframework.framework.qual.DefaultQualifier;
+
+/**
+ * {@link PrintWriter}-backed logger implementation for use with {@link net.neoforged.art.api.Renamer} which
+ * only opens the backing writer and logs messages when the {@code paper.remap-debug} system property
+ * is set to true.
+ */
+@DefaultQualifier(NonNull.class)
+final class DebugLogger implements Consumer<String>, AutoCloseable {
+ private static final boolean DEBUG = Boolean.getBoolean("paper.remap-debug");
+
+ private final @Nullable PrintWriter writer;
+
+ DebugLogger(final Path logFile) {
+ try {
+ this.writer = createWriter(logFile);
+ } catch (final IOException ex) {
+ throw new RuntimeException("Failed to initialize DebugLogger for file '" + logFile + "'", ex);
+ }
+ }
+
+ @Override
+ public void accept(final String line) {
+ this.useWriter(writer -> writer.println(line));
+ }
+
+ @Override
+ public void close() {
+ this.useWriter(PrintWriter::close);
+ }
+
+ private void useWriter(final Consumer<PrintWriter> op) {
+ final @Nullable PrintWriter writer = this.writer;
+ if (writer != null) {
+ op.accept(writer);
+ }
+ }
+
+ Consumer<String> debug() {
+ return line -> this.accept("[debug]: " + line);
+ }
+
+ static DebugLogger forOutputFile(final Path outputFile) {
+ return new DebugLogger(outputFile.resolveSibling(outputFile.getFileName() + ".log"));
+ }
+
+ private static @Nullable PrintWriter createWriter(final Path logFile) throws IOException {
+ if (!DEBUG) {
+ return null;
+ }
+ if (!Files.exists(logFile.getParent())) {
+ Files.createDirectories(logFile.getParent());
+ }
+ return new PrintWriter(logFile.toFile());
+ }
+}
diff --git a/src/main/java/io/papermc/paper/pluginremap/InsertManifestAttribute.java b/src/main/java/io/papermc/paper/pluginremap/InsertManifestAttribute.java
new file mode 100644
index 0000000000000000000000000000000000000000..3a5bb5d2a45654385ca0bc15c81ef95315b922b4
--- /dev/null
+++ b/src/main/java/io/papermc/paper/pluginremap/InsertManifestAttribute.java
@@ -0,0 +1,67 @@
+package io.papermc.paper.pluginremap;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.jar.Attributes;
+import java.util.jar.Manifest;
+import net.neoforged.art.api.Transformer;
+
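+/**
+ * {@link Transformer} that writes the {@code paperweight-mappings-namespace} attribute into the
+ * jar manifest, creating a manifest if the jar has none and {@code createIfMissing} is set.
+ */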
+final class InsertManifestAttribute implements Transformer {
+ static final String PAPERWEIGHT_NAMESPACE_MANIFEST_KEY = "paperweight-mappings-namespace";
+ static final String MOJANG_NAMESPACE = "mojang";
+ static final String MOJANG_PLUS_YARN_NAMESPACE = "mojang+yarn";
+ static final String SPIGOT_NAMESPACE = "spigot";
+
+ private final String mainAttributesKey;
+ private final String namespace;
+ private final boolean createIfMissing;
+ private volatile boolean visitedManifest = false;
+
+ static Transformer addNamespaceManifestAttribute(final String namespace) {
+ return new InsertManifestAttribute(PAPERWEIGHT_NAMESPACE_MANIFEST_KEY, namespace, true);
+ }
+
+ InsertManifestAttribute(
+ final String mainAttributesKey,
+ final String namespace,
+ final boolean createIfMissing
+ ) {
+ this.mainAttributesKey = mainAttributesKey;
+ this.namespace = namespace;
+ this.createIfMissing = createIfMissing;
+ }
+
+ @Override
+ public ManifestEntry process(final ManifestEntry entry) {
+ this.visitedManifest = true;
+ try {
+ final Manifest manifest = new Manifest(new ByteArrayInputStream(entry.getData()));
+ manifest.getMainAttributes().putValue(this.mainAttributesKey, this.namespace);
+ final ByteArrayOutputStream out = new ByteArrayOutputStream();
+ manifest.write(out);
+ return ManifestEntry.create(Entry.STABLE_TIMESTAMP, out.toByteArray());
+ } catch (final IOException e) {
+ throw new RuntimeException("Failed to modify manifest", e);
+ }
+ }
+
+ @Override
+ public Collection<? extends Entry> getExtras() {
+ if (!this.visitedManifest && this.createIfMissing) {
+ final Manifest manifest = new Manifest();
+ manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
+ manifest.getMainAttributes().putValue(this.mainAttributesKey, this.namespace);
+ final ByteArrayOutputStream out = new ByteArrayOutputStream();
+ try {
+ manifest.write(out);
+ } catch (final IOException e) {
+ throw new RuntimeException("Failed to write manifest", e);
+ }
+ return List.of(ManifestEntry.create(Entry.STABLE_TIMESTAMP, out.toByteArray()));
+ }
+ return Transformer.super.getExtras();
+ }
+}
diff --git a/src/main/java/io/papermc/paper/pluginremap/PluginRemapper.java b/src/main/java/io/papermc/paper/pluginremap/PluginRemapper.java
new file mode 100644
index 0000000000000000000000000000000000000000..5408015a25d5e3d2149976e428c9d4be470de2ba
--- /dev/null
+++ b/src/main/java/io/papermc/paper/pluginremap/PluginRemapper.java
@@ -0,0 +1,369 @@
+package io.papermc.paper.pluginremap;
+
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.mojang.logging.LogUtils;
+import io.papermc.paper.util.AtomicFiles;
+import io.papermc.paper.util.MappingEnvironment;
+import io.papermc.paper.util.concurrent.ScalingThreadPool;
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.FileSystem;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionException;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Predicate;
+import java.util.function.Supplier;
+import java.util.jar.Manifest;
+import java.util.stream.Stream;
+import net.minecraft.DefaultUncaughtExceptionHandlerWithName;
+import net.minecraft.util.ExceptionCollector;
+import net.neoforged.art.api.Renamer;
+import net.neoforged.art.api.SignatureStripperConfig;
+import net.neoforged.art.api.Transformer;
+import net.neoforged.srgutils.IMappingFile;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.checkerframework.framework.qual.DefaultQualifier;
+import org.slf4j.Logger;
+
+import static io.papermc.paper.pluginremap.InsertManifestAttribute.addNamespaceManifestAttribute;
+
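+/**
+ * Remaps plugin jars from Spigot mappings to Mojang mappings as they are loaded, caching the
+ * remapped jars (and the reversed mappings and reobfuscated server jar used to produce them)
+ * under {@code plugins/.paper-remapped}.
+ */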
+@DefaultQualifier(NonNull.class)
+public final class PluginRemapper {
+ public static final boolean DEBUG_LOGGING = Boolean.getBoolean("Paper.PluginRemapperDebug");
+ private static final String PAPER_REMAPPED = ".paper-remapped";
+ private static final String UNKNOWN_ORIGIN = "unknown-origin";
+ private static final String EXTRA_PLUGINS = "extra-plugins";
+ private static final String REMAP_CLASSPATH = "remap-classpath";
+ private static final String REVERSED_MAPPINGS = "mappings/reversed";
+ private static final Logger LOGGER = LogUtils.getClassLogger();
+
+ private final ExecutorService threadPool;
+ private final ReobfServer reobf;
+ private final RemappedPluginIndex remappedPlugins;
+ private final RemappedPluginIndex extraPlugins;
+ private final UnknownOriginRemappedPluginIndex unknownOrigin;
+ private @Nullable CompletableFuture<IMappingFile> reversedMappings;
+
+ public PluginRemapper(final Path pluginsDir) {
+ this.threadPool = createThreadPool();
+ final CompletableFuture<IMappingFile> mappings = CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, this.threadPool);
+ final Path remappedPlugins = pluginsDir.resolve(PAPER_REMAPPED);
+ this.reversedMappings = this.reversedMappingsFuture(() -> mappings, remappedPlugins, this.threadPool);
+ this.reobf = new ReobfServer(remappedPlugins.resolve(REMAP_CLASSPATH), mappings, this.threadPool);
+ this.remappedPlugins = new RemappedPluginIndex(remappedPlugins, false);
+ this.extraPlugins = new RemappedPluginIndex(this.remappedPlugins.dir().resolve(EXTRA_PLUGINS), true);
+ this.unknownOrigin = new UnknownOriginRemappedPluginIndex(this.remappedPlugins.dir().resolve(UNKNOWN_ORIGIN));
+ }
+
+ public static @Nullable PluginRemapper create(final Path pluginsDir) {
+ if (MappingEnvironment.reobf() || !MappingEnvironment.hasMappings()) {
+ return null;
+ }
+
+ return new PluginRemapper(pluginsDir);
+ }
+
+ public void shutdown() {
+ this.threadPool.shutdown();
+ this.save(true);
+ boolean didShutdown;
+ try {
+ didShutdown = this.threadPool.awaitTermination(3L, TimeUnit.SECONDS);
+ } catch (final InterruptedException ex) {
+ didShutdown = false;
+ }
+ if (!didShutdown) {
+ this.threadPool.shutdownNow();
+ }
+ }
+
+ public void save(final boolean clean) {
+ this.remappedPlugins.write();
+ this.extraPlugins.write();
+ this.unknownOrigin.write(clean);
+ }
+
+ // Called on startup and reload
+ public void loadingPlugins() {
+ if (this.reversedMappings == null) {
+ this.reversedMappings = this.reversedMappingsFuture(
+ () -> CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, this.threadPool),
+ this.remappedPlugins.dir(),
+ this.threadPool
+ );
+ }
+ }
+
+ // Called after all plugins enabled during startup/reload
+ public void pluginsEnabled() {
+ this.reversedMappings = null;
+ this.save(false);
+ }
+
+ public Path rewritePlugin(final Path plugin) {
+ // Already remapped
+ if (plugin.getParent().equals(this.remappedPlugins.dir())
+ || plugin.getParent().equals(this.extraPlugins.dir())) {
+ return plugin;
+ }
+
+ final @Nullable Path cached = this.unknownOrigin.getIfPresent(plugin);
+ if (cached != null) {
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Plugin '{}' has not changed since last remap.", plugin);
+ }
+ return cached;
+ }
+
+ return this.remapPlugin(this.unknownOrigin, plugin).join();
+ }
+
+ public List<Path> rewriteExtraPlugins(final List<Path> plugins) {
+ final @Nullable List<Path> allCached = this.extraPlugins.getAllIfPresent(plugins);
+ if (allCached != null) {
+ if (DEBUG_LOGGING) {
+ LOGGER.info("All extra plugins have a remapped variant cached.");
+ }
+ return allCached;
+ }
+
+ final List<CompletableFuture<Path>> tasks = new ArrayList<>();
+ for (final Path file : plugins) {
+ final @Nullable Path cached = this.extraPlugins.getIfPresent(file);
+ if (cached != null) {
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Extra plugin '{}' has not changed since last remap.", file);
+ }
+ tasks.add(CompletableFuture.completedFuture(cached));
+ continue;
+ }
+ tasks.add(this.remapPlugin(this.extraPlugins, file));
+ }
+ return waitForAll(tasks);
+ }
+
+ public List<Path> rewritePluginDirectory(final List<Path> jars) {
+ final @Nullable List<Path> remappedJars = this.remappedPlugins.getAllIfPresent(jars);
+ if (remappedJars != null) {
+ if (DEBUG_LOGGING) {
+ LOGGER.info("All plugins have a remapped variant cached.");
+ }
+ return remappedJars;
+ }
+
+ final List<CompletableFuture<Path>> tasks = new ArrayList<>();
+ for (final Path file : jars) {
+ final @Nullable Path existingFile = this.remappedPlugins.getIfPresent(file);
+ if (existingFile != null) {
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Plugin '{}' has not changed since last remap.", file);
+ }
+ tasks.add(CompletableFuture.completedFuture(existingFile));
+ continue;
+ }
+
+ tasks.add(this.remapPlugin(this.remappedPlugins, file));
+ }
+ return waitForAll(tasks);
+ }
+
+ private static IMappingFile reverse(final IMappingFile mappings) {
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Reversing mappings...");
+ }
+ final long start = System.currentTimeMillis();
+ final IMappingFile reversed = mappings.reverse();
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Done reversing mappings in {}ms.", System.currentTimeMillis() - start);
+ }
+ return reversed;
+ }
+
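+    // Loads the reversed mappings used for plugin remapping, caching the reversed file on disk
+    // (mappings/reversed/<mappings hash>.tiny) so the reversal only has to be computed once per
+    // mappings version. Cached reversed mappings for other hashes are deleted.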
+ private CompletableFuture<IMappingFile> reversedMappingsFuture(
+ final Supplier<CompletableFuture<IMappingFile>> mappingsFuture,
+ final Path remappedPlugins,
+ final Executor executor
+ ) {
+ return CompletableFuture.supplyAsync(() -> {
+ try {
+ final String mappingsHash = MappingEnvironment.mappingsHash();
+ final String fName = mappingsHash + ".tiny";
+ final Path reversedMappings1 = remappedPlugins.resolve(REVERSED_MAPPINGS);
+ final Path file = reversedMappings1.resolve(fName);
+ if (Files.isDirectory(reversedMappings1)) {
+ if (Files.isRegularFile(file)) {
+ return CompletableFuture.completedFuture(
+ loadMappings("Reversed", Files.newInputStream(file))
+ );
+ } else {
+ for (final Path oldFile : list(reversedMappings1, Files::isRegularFile)) {
+ Files.delete(oldFile);
+ }
+ }
+ } else {
+ Files.createDirectories(reversedMappings1);
+ }
+ return mappingsFuture.get().thenApply(loadedMappings -> {
+ final IMappingFile reversed = reverse(loadedMappings);
+ try {
+ AtomicFiles.atomicWrite(file, writeTo -> {
+ reversed.write(writeTo, IMappingFile.Format.TINY, false);
+ });
+ } catch (final IOException e) {
+ throw new RuntimeException("Failed to write reversed mappings", e);
+ }
+ return reversed;
+ });
+ } catch (final IOException e) {
+ throw new RuntimeException("Failed to load reversed mappings", e);
+ }
+ }, executor).thenCompose(f -> f);
+ }
+
+    /**
+     * Remaps the given plugin jar unless it already declares a Mojang-mapped namespace.
+     *
+     * @param index remapped plugin index
+     * @param inputFile input file
+     * @return future completing with the remapped file, or with {@code inputFile} if no remapping was necessary
+     */
+ private CompletableFuture<Path> remapPlugin(final RemappedPluginIndex index, final Path inputFile) {
+ final Path destination = index.input(inputFile);
+
+ try (final FileSystem fs = FileSystems.newFileSystem(inputFile, new HashMap<>())) {
+ // Leave dummy files if no remapping is required, so that we can check if they exist without copying the whole file
+ /*if (Files.exists(fs.getPath(PluginFileType.PAPER_PLUGIN_YML))) { // TODO Uncomment on release
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Plugin '{}' is a Paper plugin, no remapping necessary.", inputFile);
+ }
+ index.skip(inputFile);
+ return CompletableFuture.completedFuture(inputFile);
+ } else {*/
+ // Check for paperweight mojang mapped marker
+ final Path manifestPath = fs.getPath("META-INF/MANIFEST.MF");
+ if (Files.exists(manifestPath)) {
+ final Manifest manifest;
+ try (final InputStream in = new BufferedInputStream(Files.newInputStream(manifestPath))) {
+ manifest = new Manifest(in);
+ }
+ final String ns = manifest.getMainAttributes().getValue(InsertManifestAttribute.PAPERWEIGHT_NAMESPACE_MANIFEST_KEY);
+ if (ns != null && (ns.equals(InsertManifestAttribute.MOJANG_NAMESPACE) || ns.equals(InsertManifestAttribute.MOJANG_PLUS_YARN_NAMESPACE))) {
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Plugin '{}' is already Mojang mapped.", inputFile);
+ }
+ index.skip(inputFile);
+ return CompletableFuture.completedFuture(inputFile);
+ }
+ }
+ //}
+ } catch (final IOException ex) {
+ throw new RuntimeException("Failed to open plugin jar " + inputFile, ex);
+ }
+
+ return this.reobf.remapped().thenApplyAsync(reobfServer -> {
+ LOGGER.info("Remapping plugin '{}'...", inputFile);
+ final long start = System.currentTimeMillis();
+ try (final DebugLogger logger = DebugLogger.forOutputFile(destination)) {
+ try (final Renamer renamer = Renamer.builder()
+ .add(Transformer.renamerFactory(this.mappings(), false))
+ .add(addNamespaceManifestAttribute(InsertManifestAttribute.MOJANG_PLUS_YARN_NAMESPACE))
+ .add(Transformer.signatureStripperFactory(SignatureStripperConfig.ALL))
+ .lib(reobfServer.toFile())
+ .threads(1)
+ .logger(logger)
+ .debug(logger.debug())
+ .build()) {
+ renamer.run(inputFile.toFile(), destination.toFile());
+ }
+ } catch (final Exception ex) {
+ throw new RuntimeException("Failed to remap plugin jar '" + inputFile + "'", ex);
+ }
+ LOGGER.info("Done remapping plugin '{}' in {}ms.", inputFile, System.currentTimeMillis() - start);
+ return destination;
+ }, this.threadPool);
+ }
+
+ private IMappingFile mappings() {
+ final @Nullable CompletableFuture<IMappingFile> mappings = this.reversedMappings;
+ if (mappings == null) {
+ return this.reversedMappingsFuture(
+ () -> CompletableFuture.supplyAsync(PluginRemapper::loadReobfMappings, Runnable::run),
+ this.remappedPlugins.dir(),
+ Runnable::run
+ ).join();
+ }
+ return mappings.join();
+ }
+
+ private static IMappingFile loadReobfMappings() {
+ return loadMappings("Reobf", MappingEnvironment.mappingsStream());
+ }
+
+ private static IMappingFile loadMappings(final String name, final InputStream stream) {
+ try (stream) {
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Loading {} mappings...", name);
+ }
+ final long start = System.currentTimeMillis();
+ final IMappingFile load = IMappingFile.load(stream);
+ if (DEBUG_LOGGING) {
+ LOGGER.info("Done loading {} mappings in {}ms.", name, System.currentTimeMillis() - start);
+ }
+ return load;
+ } catch (final IOException ex) {
+ throw new RuntimeException("Failed to load " + name + " mappings", ex);
+ }
+ }
+
+ static List<Path> list(final Path dir, final Predicate<Path> filter) {
+ try (final Stream<Path> stream = Files.list(dir)) {
+ return stream.filter(filter).toList();
+ } catch (final IOException ex) {
+ throw new RuntimeException("Failed to list directory '" + dir + "'", ex);
+ }
+ }
+
+ private static List<Path> waitForAll(final List<CompletableFuture<Path>> tasks) {
+ final ExceptionCollector<Exception> collector = new ExceptionCollector<>();
+ final List<Path> ret = new ArrayList<>();
+ for (final CompletableFuture<Path> task : tasks) {
+ try {
+ ret.add(task.join());
+ } catch (final CompletionException ex) {
+ collector.add(ex);
+ }
+ }
+ try {
+ collector.throwIfPresent();
+ } catch (final Exception ex) {
+ throw new RuntimeException("Encountered exception remapping plugins", ex);
+ }
+ return ret;
+ }
+
+ private static ThreadPoolExecutor createThreadPool() {
+ return new ThreadPoolExecutor(
+ 0,
+ 4,
+ 5L,
+ TimeUnit.SECONDS,
+ ScalingThreadPool.createUnboundedQueue(),
+ new ThreadFactoryBuilder()
+ .setNameFormat("Paper Plugin Remapper Thread - %1$d")
+ .setUncaughtExceptionHandler(new DefaultUncaughtExceptionHandlerWithName(LOGGER))
+ .build(),
+ ScalingThreadPool.defaultReEnqueuePolicy()
+ );
+ }
+}
diff --git a/src/main/java/io/papermc/paper/pluginremap/RemappedPluginIndex.java b/src/main/java/io/papermc/paper/pluginremap/RemappedPluginIndex.java
new file mode 100644
index 0000000000000000000000000000000000000000..86fc60452404d1f4609c25a90c4803ffb80dc8ab
--- /dev/null
+++ b/src/main/java/io/papermc/paper/pluginremap/RemappedPluginIndex.java
@@ -0,0 +1,212 @@
+package io.papermc.paper.pluginremap;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.mojang.logging.LogUtils;
+import io.papermc.paper.util.Hashing;
+import io.papermc.paper.util.MappingEnvironment;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.checkerframework.framework.qual.DefaultQualifier;
+import org.slf4j.Logger;
+import org.spongepowered.configurate.loader.AtomicFiles;
+
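+/**
+ * Index of remapped plugin jars, keyed by the SHA-256 hash of the input jar and persisted to
+ * {@code index.json} in the index directory. All cached entries are discarded when the server's
+ * mappings hash changes.
+ */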
+@DefaultQualifier(NonNull.class)
+class RemappedPluginIndex {
+ private static final Logger LOGGER = LogUtils.getLogger();
+ private static final Gson GSON = new GsonBuilder()
+ .setPrettyPrinting()
+ .create();
+ private static final String INDEX_FILE_NAME = "index.json";
+
+ protected final State state;
+ private final Path dir;
+ private final Path indexFile;
+ private final boolean handleDuplicateFileNames;
+
+ // todo maybe hash remapped variants to ensure they haven't changed? probably unneeded
+ static final class State {
+ final Map<String, String> hashes = new HashMap<>();
+ final Set<String> skippedHashes = new HashSet<>();
+ private final String mappingsHash = MappingEnvironment.mappingsHash();
+ }
+
+ RemappedPluginIndex(final Path dir, final boolean handleDuplicateFileNames) {
+ this.dir = dir;
+ this.handleDuplicateFileNames = handleDuplicateFileNames;
+ if (!Files.exists(this.dir)) {
+ try {
+ Files.createDirectories(this.dir);
+ } catch (final IOException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+
+ this.indexFile = dir.resolve(INDEX_FILE_NAME);
+ if (Files.isRegularFile(this.indexFile)) {
+ try {
+ this.state = this.readIndex();
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
+ }
+ } else {
+ this.state = new State();
+ }
+ }
+
+ private State readIndex() throws IOException {
+ final State state;
+ try (final BufferedReader reader = Files.newBufferedReader(this.indexFile)) {
+ state = GSON.fromJson(reader, State.class);
+ }
+
+ // If mappings have changed, delete all cached files and create a new index
+ if (!state.mappingsHash.equals(MappingEnvironment.mappingsHash())) {
+ for (final String fileName : state.hashes.values()) {
+ Files.deleteIfExists(this.dir.resolve(fileName));
+ }
+ return new State();
+ }
+ return state;
+ }
+
+ Path dir() {
+ return this.dir;
+ }
+
+ /**
+ * Returns a list of cached paths if all of the input paths are present in the cache.
+     * The returned list may mix cached remapped paths with original input paths (for files that were previously skipped).
+ *
+ * @param paths plugin jar paths to check
+ * @return null if any of the paths are not present in the cache, otherwise a list of the cached paths
+ */
+ @Nullable List<Path> getAllIfPresent(final List<Path> paths) {
+ final Map<Path, String> hashCache = new HashMap<>();
+ final Function<Path, String> inputFileHash = path -> hashCache.computeIfAbsent(path, Hashing::sha256);
+
+ // Delete cached entries we no longer need
+ final Iterator<Map.Entry<String, String>> iterator = this.state.hashes.entrySet().iterator();
+ while (iterator.hasNext()) {
+ final Map.Entry<String, String> entry = iterator.next();
+ final String inputHash = entry.getKey();
+ final String fileName = entry.getValue();
+ if (paths.stream().anyMatch(path -> inputFileHash.apply(path).equals(inputHash))) {
+ // Hash is used, keep it
+ continue;
+ }
+
+ iterator.remove();
+ try {
+ Files.deleteIfExists(this.dir.resolve(fileName));
+ } catch (final IOException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+
+ // Also clear hashes of skipped files
+ this.state.skippedHashes.removeIf(hash -> paths.stream().noneMatch(path -> inputFileHash.apply(path).equals(hash)));
+
+ final List<Path> ret = new ArrayList<>();
+ for (final Path path : paths) {
+ final String inputHash = inputFileHash.apply(path);
+ if (this.state.skippedHashes.contains(inputHash)) {
+ // Add the original path
+ ret.add(path);
+ continue;
+ }
+
+ final @Nullable Path cached = this.getIfPresent(inputHash);
+ if (cached == null) {
+ // Missing the remapped file
+ return null;
+ }
+ ret.add(cached);
+ }
+ return ret;
+ }
+
+ private String createCachedFileName(final Path in) {
+ if (this.handleDuplicateFileNames) {
+ final String fileName = in.getFileName().toString();
+ final int i = fileName.lastIndexOf(".jar");
+ return fileName.substring(0, i) + "-" + System.currentTimeMillis() + ".jar";
+ }
+ return in.getFileName().toString();
+ }
+
+    /**
+     * Returns the given path if the file was previously skipped for remapping,
+     * otherwise the cached remapped path, or null if there is no cached copy.
+     *
+     * @param in input file
+     * @return {@code in} if the file was skipped, the cached path if present, otherwise null
+     */
+ @Nullable Path getIfPresent(final Path in) {
+ final String inHash = Hashing.sha256(in);
+ if (this.state.skippedHashes.contains(inHash)) {
+ return in;
+ }
+ return this.getIfPresent(inHash);
+ }
+
+ /**
+ * Returns the cached path if a remapped file is present for the given hash, otherwise null.
+ *
+ * @param inHash hash of the input file
+ * @return the cached path if present, otherwise null
+ * @see #getIfPresent(Path)
+ */
+ protected @Nullable Path getIfPresent(final String inHash) {
+ final @Nullable String fileName = this.state.hashes.get(inHash);
+ if (fileName == null) {
+ return null;
+ }
+
+ final Path path = this.dir.resolve(fileName);
+ if (Files.exists(path)) {
+ return path;
+ }
+ return null;
+ }
+
+ Path input(final Path in) {
+ return this.input(in, Hashing.sha256(in));
+ }
+
+ /**
+ * Marks the given file as skipped for remapping.
+ *
+ * @param in input file
+ */
+ void skip(final Path in) {
+ this.state.skippedHashes.add(Hashing.sha256(in));
+ }
+
+ protected Path input(final Path in, final String hashString) {
+ final String name = this.createCachedFileName(in);
+ this.state.hashes.put(hashString, name);
+ return this.dir.resolve(name);
+ }
+
+ void write() {
+ try (final BufferedWriter writer = AtomicFiles.atomicBufferedWriter(this.indexFile, StandardCharsets.UTF_8)) {
+ GSON.toJson(this.state, writer);
+ } catch (final IOException ex) {
+ LOGGER.warn("Failed to write index file '{}'", this.indexFile, ex);
+ }
+ }
+}
diff --git a/src/main/java/io/papermc/paper/pluginremap/ReobfServer.java b/src/main/java/io/papermc/paper/pluginremap/ReobfServer.java
new file mode 100644
index 0000000000000000000000000000000000000000..aa5bf7ae042f3d43f7612d027ebef0e5c758ffc9
--- /dev/null
+++ b/src/main/java/io/papermc/paper/pluginremap/ReobfServer.java
@@ -0,0 +1,92 @@
+package io.papermc.paper.pluginremap;
+
+import com.mojang.logging.LogUtils;
+import io.papermc.paper.util.AtomicFiles;
+import io.papermc.paper.util.MappingEnvironment;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.Executor;
+import net.neoforged.art.api.Renamer;
+import net.neoforged.art.api.Transformer;
+import net.neoforged.art.internal.RenamerImpl;
+import net.neoforged.srgutils.IMappingFile;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.framework.qual.DefaultQualifier;
+import org.slf4j.Logger;
+
+import static io.papermc.paper.pluginremap.InsertManifestAttribute.addNamespaceManifestAttribute;
+
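+/**
+ * Creates and caches a reobfuscated (Spigot-mapped) copy of the running server jar under the
+ * remap classpath directory; it is passed to the renamer as a library when remapping plugins
+ * so that inheritance information is available.
+ */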
+@DefaultQualifier(NonNull.class)
+final class ReobfServer {
+ private static final Logger LOGGER = LogUtils.getClassLogger();
+
+ private final Path remapClasspathDir;
+ private final CompletableFuture<Void> load;
+
+ ReobfServer(final Path remapClasspathDir, final CompletableFuture<IMappingFile> mappings, final Executor executor) {
+ this.remapClasspathDir = remapClasspathDir;
+ if (this.mappingsChanged()) {
+ this.load = mappings.thenAcceptAsync(this::remap, executor);
+ } else {
+ if (PluginRemapper.DEBUG_LOGGING) {
+ LOGGER.info("Have cached reobf server for current mappings.");
+ }
+ this.load = CompletableFuture.completedFuture(null);
+ }
+ }
+
+ CompletableFuture<Path> remapped() {
+ return this.load.thenApply($ -> this.remappedPath());
+ }
+
+ private Path remappedPath() {
+ return this.remapClasspathDir.resolve(MappingEnvironment.mappingsHash() + ".jar");
+ }
+
+ private boolean mappingsChanged() {
+ return !Files.exists(this.remappedPath());
+ }
+
+ private void remap(final IMappingFile mappings) {
+ try {
+ if (!Files.exists(this.remapClasspathDir)) {
+ Files.createDirectories(this.remapClasspathDir);
+ }
+ for (final Path file : PluginRemapper.list(this.remapClasspathDir, Files::isRegularFile)) {
+ Files.delete(file);
+ }
+ } catch (final IOException ex) {
+ throw new RuntimeException(ex);
+ }
+
+ LOGGER.info("Remapping server...");
+ final long startRemap = System.currentTimeMillis();
+ try (final DebugLogger log = DebugLogger.forOutputFile(this.remappedPath())) {
+ AtomicFiles.atomicWrite(this.remappedPath(), writeTo -> {
+ try (final RenamerImpl renamer = (RenamerImpl) Renamer.builder()
+ .logger(log)
+ .debug(log.debug())
+ .threads(1)
+ .add(Transformer.renamerFactory(mappings, false))
+ .add(addNamespaceManifestAttribute(InsertManifestAttribute.SPIGOT_NAMESPACE))
+ .build()) {
+ renamer.run(serverJar().toFile(), writeTo.toFile(), true);
+ }
+ });
+ } catch (final Exception ex) {
+ throw new RuntimeException("Failed to remap server jar", ex);
+ }
+ LOGGER.info("Done remapping server in {}ms.", System.currentTimeMillis() - startRemap);
+ }
+
+ private static Path serverJar() {
+ try {
+ return Path.of(ReobfServer.class.getProtectionDomain().getCodeSource().getLocation().toURI());
+ } catch (final URISyntaxException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+}
diff --git a/src/main/java/io/papermc/paper/pluginremap/UnknownOriginRemappedPluginIndex.java b/src/main/java/io/papermc/paper/pluginremap/UnknownOriginRemappedPluginIndex.java
new file mode 100644
index 0000000000000000000000000000000000000000..ad53aab4fee16b76f6e4fd44e4b28d06fef80de4
--- /dev/null
+++ b/src/main/java/io/papermc/paper/pluginremap/UnknownOriginRemappedPluginIndex.java
@@ -0,0 +1,72 @@
+package io.papermc.paper.pluginremap;
+
+import com.mojang.logging.LogUtils;
+import io.papermc.paper.util.Hashing;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.checkerframework.framework.qual.DefaultQualifier;
+import org.slf4j.Logger;
+
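+/**
+ * {@link RemappedPluginIndex} for plugin jars that are not tracked as part of a known directory.
+ * It records which cached entries were used during the current run so that unused cached jars
+ * can be deleted on a clean write.
+ */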
+@DefaultQualifier(NonNull.class)
+final class UnknownOriginRemappedPluginIndex extends RemappedPluginIndex {
+ private static final Logger LOGGER = LogUtils.getLogger();
+
+ private final Set<String> used = new HashSet<>();
+
+ UnknownOriginRemappedPluginIndex(final Path dir) {
+ super(dir, true);
+ }
+
+ @Override
+ @Nullable Path getIfPresent(final Path in) {
+ final String hash = Hashing.sha256(in);
+ if (this.state.skippedHashes.contains(hash)) {
+ return in;
+ }
+
+ final @Nullable Path path = super.getIfPresent(hash);
+ if (path != null) {
+ this.used.add(hash);
+ }
+ return path;
+ }
+
+ @Override
+ Path input(final Path in) {
+ final String hash = Hashing.sha256(in);
+ this.used.add(hash);
+ return super.input(in, hash);
+ }
+
+ void write(final boolean clean) {
+ if (!clean) {
+ super.write();
+ return;
+ }
+
+ final Iterator<Map.Entry<String, String>> it = this.state.hashes.entrySet().iterator();
+ while (it.hasNext()) {
+ final Map.Entry<String, String> next = it.next();
+ if (this.used.contains(next.getKey())) {
+ continue;
+ }
+
+ // Remove unused mapped file
+ it.remove();
+ final Path file = this.dir().resolve(next.getValue());
+ try {
+ Files.deleteIfExists(file);
+ } catch (final IOException ex) {
+ LOGGER.warn("Failed to delete no longer needed cached jar '{}'", file, ex);
+ }
+ }
+ super.write();
+ }
+}
diff --git a/src/main/java/io/papermc/paper/util/AtomicFiles.java b/src/main/java/io/papermc/paper/util/AtomicFiles.java
new file mode 100644
index 0000000000000000000000000000000000000000..944250d2b8e1969f221b2f24cce7b1019c55fe01
--- /dev/null
+++ b/src/main/java/io/papermc/paper/util/AtomicFiles.java
@@ -0,0 +1,96 @@
+package io.papermc.paper.util;
+
+import java.io.IOException;
+import java.nio.file.AccessDeniedException;
+import java.nio.file.AtomicMoveNotSupportedException;
+import java.nio.file.CopyOption;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.function.Consumer;
+import org.spongepowered.configurate.util.CheckedConsumer;
+
+// Stripped down version of https://github.com/jpenilla/squaremap/blob/7d7994b4096e5fc61364ea2d87e9aa4e14edf5c6/common/src/main/java/xyz/jpenilla/squaremap/common/util/FileUtil.java
+public final class AtomicFiles {
+
+ private AtomicFiles() {
+ }
+
+ public static void atomicWrite(final Path path, final CheckedConsumer<Path, IOException> op) throws IOException {
+ final Path tmp = tempFile(path);
+
+ try {
+ op.accept(tmp);
+ atomicMove(tmp, path, true);
+ } catch (final IOException ex) {
+ try {
+ Files.deleteIfExists(tmp);
+ } catch (final IOException ex1) {
+ ex.addSuppressed(ex1);
+ }
+ throw ex;
+ }
+ }
+
+ private static Path tempFile(final Path path) {
+        return path.resolveSibling("." + System.nanoTime() + "-" + ThreadLocalRandom.current().nextInt() + "-" + path.getFileName().toString() + ".tmp");
+    }
+
+ @SuppressWarnings("BusyWait") // not busy waiting
+ public static void atomicMove(final Path from, final Path to, final boolean replaceExisting) throws IOException {
+ final int maxRetries = 2;
+
+ try {
+ atomicMoveIfPossible(from, to, replaceExisting);
+ } catch (final AccessDeniedException ex) {
+ // Sometimes because of file locking this will fail... Let's just try again and hope for the best
+ // Thanks Windows!
+ int retries = 1;
+ while (true) {
+ try {
+ // Pause for a bit
+ Thread.sleep(10L * retries);
+ atomicMoveIfPossible(from, to, replaceExisting);
+ break; // success
+ } catch (final AccessDeniedException ex1) {
+ ex.addSuppressed(ex1);
+ if (retries == maxRetries) {
+ throw ex;
+ }
+ } catch (final InterruptedException interruptedException) {
+ ex.addSuppressed(interruptedException);
+ Thread.currentThread().interrupt();
+ throw ex;
+ }
+ ++retries;
+ }
+ }
+ }
+
+ private static void atomicMoveIfPossible(final Path from, final Path to, final boolean replaceExisting) throws IOException {
+ final CopyOption[] options = replaceExisting
+ ? new CopyOption[]{StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING}
+ : new CopyOption[]{StandardCopyOption.ATOMIC_MOVE};
+
+ try {
+ Files.move(from, to, options);
+ } catch (final AtomicMoveNotSupportedException ex) {
+ Files.move(from, to, replaceExisting ? new CopyOption[]{StandardCopyOption.REPLACE_EXISTING} : new CopyOption[]{});
+ }
+ }
+
+ private static <T, X extends Throwable> Consumer<T> sneaky(final CheckedConsumer<T, X> consumer) {
+ return t -> {
+ try {
+ consumer.accept(t);
+ } catch (final Throwable thr) {
+ rethrow(thr);
+ }
+ };
+ }
+
+ @SuppressWarnings("unchecked")
+ private static <X extends Throwable> RuntimeException rethrow(final Throwable t) throws X {
+ throw (X) t;
+ }
+}
diff --git a/src/main/java/io/papermc/paper/util/Hashing.java b/src/main/java/io/papermc/paper/util/Hashing.java
new file mode 100644
index 0000000000000000000000000000000000000000..49e235e0035d7f063c8544ec10bd8f9ef4e613c3
--- /dev/null
+++ b/src/main/java/io/papermc/paper/util/Hashing.java
@@ -0,0 +1,50 @@
+package io.papermc.paper.util;
+
+import com.google.common.hash.HashCode;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Locale;
+import org.apache.commons.io.IOUtils;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.framework.qual.DefaultQualifier;
+
+@DefaultQualifier(NonNull.class)
+public final class Hashing {
+ private Hashing() {
+ }
+
+ /**
+ * Hash the provided {@link InputStream} using SHA-256. Stream will be closed.
+ *
+ * @param stream input stream
+ * @return SHA-256 hash string
+ */
+ public static String sha256(final InputStream stream) {
+ try (stream) {
+ return com.google.common.hash.Hashing.sha256().hashBytes(IOUtils.toByteArray(stream)).toString().toUpperCase(Locale.ENGLISH);
+ } catch (final IOException ex) {
+ throw new RuntimeException("Failed to take hash of InputStream", ex);
+ }
+ }
+
+ /**
+ * Hash the provided file using SHA-256.
+ *
+ * @param file file
+ * @return SHA-256 hash string
+ */
+ public static String sha256(final Path file) {
+ if (!Files.isRegularFile(file)) {
+ throw new IllegalArgumentException("'" + file + "' is not a regular file!");
+ }
+ final HashCode hash;
+ try {
+ hash = com.google.common.io.Files.asByteSource(file.toFile()).hash(com.google.common.hash.Hashing.sha256());
+ } catch (final IOException ex) {
+ throw new RuntimeException("Failed to take hash of file '" + file + "'", ex);
+ }
+ return hash.toString().toUpperCase(Locale.ENGLISH);
+ }
+}
diff --git a/src/main/java/io/papermc/paper/util/MappingEnvironment.java b/src/main/java/io/papermc/paper/util/MappingEnvironment.java
new file mode 100644
index 0000000000000000000000000000000000000000..8e4229634d41a42b3d93948eebb77def7c0c72b1
--- /dev/null
+++ b/src/main/java/io/papermc/paper/util/MappingEnvironment.java
@@ -0,0 +1,65 @@
+package io.papermc.paper.util;
+
+import java.io.InputStream;
+import java.util.Objects;
+import java.util.jar.Manifest;
+import net.minecraft.world.entity.MobCategory;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.checkerframework.framework.qual.DefaultQualifier;
+
+@DefaultQualifier(NonNull.class)
+public final class MappingEnvironment {
+ private static final @Nullable String MAPPINGS_HASH = readMappingsHash();
+ private static final boolean REOBF = checkReobf();
+
+ private MappingEnvironment() {
+ }
+
+ public static boolean reobf() {
+ return REOBF;
+ }
+
+ public static boolean hasMappings() {
+ return MAPPINGS_HASH != null;
+ }
+
+ public static InputStream mappingsStream() {
+ return Objects.requireNonNull(mappingsStreamIfPresent(), "Missing mappings!");
+ }
+
+ public static @Nullable InputStream mappingsStreamIfPresent() {
+ return MappingEnvironment.class.getClassLoader().getResourceAsStream("META-INF/mappings/reobf.tiny");
+ }
+
+ public static String mappingsHash() {
+ return Objects.requireNonNull(MAPPINGS_HASH, "MAPPINGS_HASH");
+ }
+
+ private static @Nullable String readMappingsHash() {
+ final @Nullable Manifest manifest = JarManifests.manifest(MappingEnvironment.class);
+ if (manifest != null) {
+ final Object hash = manifest.getMainAttributes().getValue("Included-Mappings-Hash");
+ if (hash != null) {
+ return hash.toString();
+ }
+ }
+
+ final @Nullable InputStream stream = mappingsStreamIfPresent();
+ if (stream == null) {
+ return null;
+ }
+ return Hashing.sha256(stream);
+ }
+
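+    // Detect whether this server is running with Mojang mappings or has been reobfuscated to
+    // Spigot mappings by checking the runtime name of a known Minecraft class.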
+ @SuppressWarnings("ConstantConditions")
+ private static boolean checkReobf() {
+ final Class<?> clazz = MobCategory.class;
+ if (clazz.getSimpleName().equals("MobCategory")) {
+ return false;
+ } else if (clazz.getSimpleName().equals("EnumCreatureType")) {
+ return true;
+ }
+ throw new IllegalStateException();
+ }
+}
diff --git a/src/main/java/io/papermc/paper/util/ObfHelper.java b/src/main/java/io/papermc/paper/util/ObfHelper.java
index e8ff684d8bd994c64ff34f20e1e0601b678244c1..3f03d5efcd95e3adb76dc4292b3d2c420fdc58af 100644
--- a/src/main/java/io/papermc/paper/util/ObfHelper.java
+++ b/src/main/java/io/papermc/paper/util/ObfHelper.java
@@ -89,10 +89,10 @@ public enum ObfHelper {
}
private static @Nullable Set<ClassMapping> loadMappingsIfPresent() {
- try (final @Nullable InputStream mappingsInputStream = ObfHelper.class.getClassLoader().getResourceAsStream("META-INF/mappings/reobf.tiny")) {
- if (mappingsInputStream == null) {
- return null;
- }
+ if (!MappingEnvironment.hasMappings()) {
+ return null;
+ }
+ try (final InputStream mappingsInputStream = MappingEnvironment.mappingsStream()) {
final MemoryMappingTree tree = new MemoryMappingTree();
MappingReader.read(new InputStreamReader(mappingsInputStream, StandardCharsets.UTF_8), MappingFormat.TINY_2_FILE, tree);
final Set<ClassMapping> classes = new HashSet<>();
diff --git a/src/main/java/io/papermc/paper/util/concurrent/ScalingThreadPool.java b/src/main/java/io/papermc/paper/util/concurrent/ScalingThreadPool.java
new file mode 100644
index 0000000000000000000000000000000000000000..badff5d6ae6dd8d209c82bc7e8afe370db6148f2
--- /dev/null
+++ b/src/main/java/io/papermc/paper/util/concurrent/ScalingThreadPool.java
@@ -0,0 +1,85 @@
+package io.papermc.paper.util.concurrent;
+
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RejectedExecutionHandler;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Utilities for scaling thread pools.
+ *
+ * @see <a href="https://medium.com/@uditharosha/java-scale-first-executorservice-4245a63222df">Java Scale First ExecutorService — A myth or a reality</a>
+ */
+public final class ScalingThreadPool {
+ private ScalingThreadPool() {
+ }
+
+ public static RejectedExecutionHandler defaultReEnqueuePolicy() {
+ return reEnqueuePolicy(new ThreadPoolExecutor.AbortPolicy());
+ }
+
+ public static RejectedExecutionHandler reEnqueuePolicy(final RejectedExecutionHandler original) {
+ return new ReEnqueuePolicy(original);
+ }
+
+ public static <E> BlockingQueue<E> createUnboundedQueue() {
+ return new Queue<>();
+ }
+
+ public static <E> BlockingQueue<E> createQueue(final int capacity) {
+ return new Queue<>(capacity);
+ }
+
+ private static final class Queue<E> extends LinkedBlockingQueue<E> {
+ private final AtomicInteger idleThreads = new AtomicInteger(0);
+
+ private Queue() {
+ super();
+ }
+
+ private Queue(final int capacity) {
+ super(capacity);
+ }
+
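+        // Scale-first behaviour: only queue the task if a thread is currently idle; otherwise
+        // report the queue as full so the executor spawns a new thread (up to its maximum).
+        // Once saturated, ReEnqueuePolicy falls back to add(), which always queues.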
+ @Override
+ public boolean offer(final E e) {
+ return this.idleThreads.get() > 0 && super.offer(e);
+ }
+
+ @Override
+ public E take() throws InterruptedException {
+ this.idleThreads.incrementAndGet();
+ try {
+ return super.take();
+ } finally {
+ this.idleThreads.decrementAndGet();
+ }
+ }
+
+ @Override
+ public E poll(final long timeout, final TimeUnit unit) throws InterruptedException {
+ this.idleThreads.incrementAndGet();
+ try {
+ return super.poll(timeout, unit);
+ } finally {
+ this.idleThreads.decrementAndGet();
+ }
+ }
+
+ @Override
+ public boolean add(final E e) {
+ return super.offer(e);
+ }
+ }
+
+ private record ReEnqueuePolicy(RejectedExecutionHandler originalHandler) implements RejectedExecutionHandler {
+ @Override
+ public void rejectedExecution(final Runnable r, final ThreadPoolExecutor executor) {
+ if (!executor.getQueue().add(r)) {
+ this.originalHandler.rejectedExecution(r, executor);
+ }
+ }
+ }
+}
diff --git a/src/main/java/net/minecraft/server/MinecraftServer.java b/src/main/java/net/minecraft/server/MinecraftServer.java
index 7b5d3fac1b5f4d22340a620a5edd5a8fba6be584..e021ac312127e0544606ab466cab781915e027d2 100644
--- a/src/main/java/net/minecraft/server/MinecraftServer.java
+++ b/src/main/java/net/minecraft/server/MinecraftServer.java
@@ -637,6 +637,7 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTa
this.server.enablePlugins(org.bukkit.plugin.PluginLoadOrder.POSTWORLD);
this.server.getPluginManager().callEvent(new ServerLoadEvent(ServerLoadEvent.LoadType.STARTUP));
+ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.pluginsEnabled(); // Paper - Remap plugins
this.connection.acceptConnections();
}
@@ -909,6 +910,7 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTa
this.server.disablePlugins();
}
// CraftBukkit end
+ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.shutdown(); // Paper - Plugin remapping
this.getConnection().stop();
this.isSaving = true;
if (this.playerList != null) {
diff --git a/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java b/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java
index 0d6688e9da81453e64daacfb81e4a13cc37b3e66..98cba55466d6798e5de33d8dcbf03e205e5199d8 100644
--- a/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java
+++ b/src/main/java/net/minecraft/server/dedicated/DedicatedServer.java
@@ -315,6 +315,12 @@ public class DedicatedServer extends MinecraftServer implements ServerInterface
}
}
+ // Paper start
+ public java.io.File getPluginsFolder() {
+ return (java.io.File) this.options.valueOf("plugins");
+ }
+ // Paper end
+
@Override
public boolean isSpawningAnimals() {
return this.getProperties().spawnAnimals && super.isSpawningAnimals();
diff --git a/src/main/java/net/neoforged/art/internal/RenamerImpl.java b/src/main/java/net/neoforged/art/internal/RenamerImpl.java
new file mode 100644
index 0000000000000000000000000000000000000000..73b20a92f330311e3fef8f03b51a098513afafc1
--- /dev/null
+++ b/src/main/java/net/neoforged/art/internal/RenamerImpl.java
@@ -0,0 +1,308 @@
+/*
+ * Forge Auto Renaming Tool
+ * Copyright (c) 2021
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation version 2.1
+ * of the License.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+package net.neoforged.art.internal;
+
+import java.io.BufferedOutputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import java.util.zip.ZipOutputStream;
+
+import net.neoforged.cliutils.JarUtils;
+import net.neoforged.cliutils.progress.ProgressReporter;
+import org.objectweb.asm.Opcodes;
+
+import net.neoforged.art.api.ClassProvider;
+import net.neoforged.art.api.Renamer;
+import net.neoforged.art.api.Transformer;
+import net.neoforged.art.api.Transformer.ClassEntry;
+import net.neoforged.art.api.Transformer.Entry;
+import net.neoforged.art.api.Transformer.ManifestEntry;
+import net.neoforged.art.api.Transformer.ResourceEntry;
+
+public class RenamerImpl implements Renamer { // Paper - public
+ private static final ProgressReporter PROGRESS = ProgressReporter.getDefault();
+ static final int MAX_ASM_VERSION = Opcodes.ASM9;
+ private static final String MANIFEST_NAME = "META-INF/MANIFEST.MF";
+ private final List<File> libraries;
+ private final List<Transformer> transformers;
+ private final SortedClassProvider sortedClassProvider;
+ private final List<ClassProvider> classProviders;
+ private final int threads;
+ private final Consumer<String> logger;
+ private final Consumer<String> debug;
+ private boolean setup = false;
+ private ClassProvider libraryClasses;
+
+ RenamerImpl(List<File> libraries, List<Transformer> transformers, SortedClassProvider sortedClassProvider, List<ClassProvider> classProviders,
+ int threads, Consumer<String> logger, Consumer<String> debug) {
+ this.libraries = libraries;
+ this.transformers = transformers;
+ this.sortedClassProvider = sortedClassProvider;
+ this.classProviders = Collections.unmodifiableList(classProviders);
+ this.threads = threads;
+ this.logger = logger;
+ this.debug = debug;
+ }
+
+ private void setup() {
+ if (this.setup)
+ return;
+
+ this.setup = true;
+
+ ClassProvider.Builder libraryClassesBuilder = ClassProvider.builder().shouldCacheAll(true);
+ this.logger.accept("Adding Libraries to Inheritance");
+ this.libraries.forEach(f -> libraryClassesBuilder.addLibrary(f.toPath()));
+
+ this.libraryClasses = libraryClassesBuilder.build();
+ }
+
+ @Override
+ public void run(File input, File output) {
+ // Paper start - Add remappingSelf
+ this.run(input, output, false);
+ }
+ public void run(File input, File output, boolean remappingSelf) {
+ // Paper end
+ if (!this.setup)
+ this.setup();
+
+ if (Boolean.getBoolean(ProgressReporter.ENABLED_PROPERTY)) {
+ try {
+ PROGRESS.setMaxProgress(JarUtils.getFileCountInZip(input));
+ } catch (IOException e) {
+ logger.accept("Failed to read zip file count: " + e);
+ }
+ }
+
+ input = Objects.requireNonNull(input).getAbsoluteFile();
+ output = Objects.requireNonNull(output).getAbsoluteFile();
+
+ if (!input.exists())
+ throw new IllegalArgumentException("Input file not found: " + input.getAbsolutePath());
+
+ logger.accept("Reading Input: " + input.getAbsolutePath());
+ PROGRESS.setStep("Reading input jar");
+ // Read everything from the input jar!
+ List<Entry> oldEntries = new ArrayList<>();
+ try (ZipFile in = new ZipFile(input)) {
+ int amount = 0;
+ for (Enumeration<? extends ZipEntry> entries = in.entries(); entries.hasMoreElements();) {
+ final ZipEntry e = entries.nextElement();
+ if (e.isDirectory())
+ continue;
+ String name = e.getName();
+ byte[] data;
+ try (InputStream entryInput = in.getInputStream(e)) {
+ data = entryInput.readAllBytes(); // Paper - Use readAllBytes
+ }
+
+ if (name.endsWith(".class") && !name.contains("META-INF/")) // Paper - Skip META-INF entries
+ oldEntries.add(ClassEntry.create(name, e.getTime(), data));
+ else if (name.equals(MANIFEST_NAME))
+ oldEntries.add(ManifestEntry.create(e.getTime(), data));
+ else if (name.equals("javadoctor.json"))
+ oldEntries.add(Transformer.JavadoctorEntry.create(e.getTime(), data));
+ else
+ oldEntries.add(ResourceEntry.create(name, e.getTime(), data));
+
+ if ((++amount) % 10 == 0) {
+ PROGRESS.setProgress(amount);
+ }
+ }
+ } catch (IOException e) {
+ throw new RuntimeException("Could not parse input: " + input.getAbsolutePath(), e);
+ }
+
+ this.sortedClassProvider.clearCache();
+ ArrayList<ClassProvider> classProviders = new ArrayList<>(this.classProviders);
+ classProviders.add(0, this.libraryClasses);
+ this.sortedClassProvider.classProviders = classProviders;
+
+ AsyncHelper async = new AsyncHelper(threads);
+ try {
+
+ /* Disabled until we do something with it
+ // Gather original file Hashes, so that we can detect changes and update the manifest if necessary
+ log("Gathering original hashes");
+ Map<String, String> oldHashes = async.invokeAll(oldEntries,
+ e -> new Pair<>(e.getName(), HashFunction.SHA256.hash(e.getData()))
+ ).stream().collect(Collectors.toMap(Pair::getLeft, Pair::getRight));
+ */
+
+ PROGRESS.setProgress(0);
+ PROGRESS.setIndeterminate(true);
+ PROGRESS.setStep("Processing entries");
+
+ List<ClassEntry> ourClasses = oldEntries.stream()
+ .filter(e -> e instanceof ClassEntry && !e.getName().startsWith("META-INF/"))
+ .map(ClassEntry.class::cast)
+ .collect(Collectors.toList());
+
+ // Add the original classes to the inheritance map, TODO: Multi-Release somehow?
+ logger.accept("Adding input to inheritance map");
+ ClassProvider.Builder inputClassesBuilder = ClassProvider.builder();
+ async.consumeAll(ourClasses, ClassEntry::getClassName, c ->
+ inputClassesBuilder.addClass(c.getName().substring(0, c.getName().length() - 6), c.getData())
+ );
+ classProviders.add(0, inputClassesBuilder.build());
+
+ // Process everything
+ logger.accept("Processing entries");
+ List<Entry> newEntries = async.invokeAll(oldEntries, Entry::getName, this::processEntry);
+
+ logger.accept("Adding extras");
+ // Paper start - I'm pretty sure the duplicates are because the input is already on the classpath
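+ // In the remappingSelf case we keep the first occurrence of each entry name instead of failing;
+ // for any other input, duplicate entries are still treated as an error below.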
+ List<Entry> finalNewEntries = newEntries;
+ transformers.forEach(t -> finalNewEntries.addAll(t.getExtras()));
+
+ Set<String> seen = new HashSet<>();
+ if (remappingSelf) {
+ // deduplicate
+ List<Entry> n = new ArrayList<>();
+ for (final Entry e : newEntries) {
+ if (seen.add(e.getName())) {
+ n.add(e);
+ }
+ }
+ newEntries = n;
+ } else {
+ String dupes = newEntries.stream().map(Entry::getName)
+ .filter(n -> !seen.add(n))
+ .sorted()
+ .collect(Collectors.joining(", "));
+ if (!dupes.isEmpty())
+ throw new IllegalStateException("Duplicate entries detected: " + dupes);
+ }
+ // Paper end
+
+ // We care about stable output, so sort, and single thread write.
+ logger.accept("Sorting");
+ Collections.sort(newEntries, this::compare);
+
+ if (!output.getParentFile().exists())
+ output.getParentFile().mkdirs();
+
+ seen.clear();
+
+ PROGRESS.setMaxProgress(newEntries.size());
+ PROGRESS.setStep("Writing output");
+
+ logger.accept("Writing Output: " + output.getAbsolutePath());
+ try (OutputStream fos = new BufferedOutputStream(Files.newOutputStream(output.toPath()));
+ ZipOutputStream zos = new ZipOutputStream(fos)) {
+
+ int amount = 0;
+ for (Entry e : newEntries) {
+ String name = e.getName();
+ int idx = name.lastIndexOf('/');
+ if (idx != -1)
+ addDirectory(zos, seen, name.substring(0, idx));
+
+ logger.accept(" " + name);
+ ZipEntry entry = new ZipEntry(name);
+ entry.setTime(e.getTime());
+ zos.putNextEntry(entry);
+ zos.write(e.getData());
+ zos.closeEntry();
+
+ if ((++amount) % 10 == 0) {
+ PROGRESS.setProgress(amount);
+ }
+ }
+
+ PROGRESS.setProgress(amount);
+ }
+ } catch (final IOException e) {
+ throw new RuntimeException("Could not write to file " + output.getAbsolutePath(), e);
+ } finally {
+ async.shutdown();
+ }
+ }
+
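+ // Note: after the Paper change above (reading entries via InputStream#readAllBytes), this helper
+ // appears to be unused; it is left as-is, presumably to keep the file close to upstream ART.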
+ private byte[] readAllBytes(InputStream in, long size) throws IOException {
+ // This program will crash if size exceeds MAX_INT anyway since arrays are limited to 32-bit indices
+ ByteArrayOutputStream tmp = new ByteArrayOutputStream(size >= 0 ? (int) size : 0);
+
+ byte[] buffer = new byte[8192];
+ int read;
+ while ((read = in.read(buffer)) != -1) {
+ tmp.write(buffer, 0, read);
+ }
+
+ return tmp.toByteArray();
+ }
+
+ // Though directory entries are not strictly necessary, we add them because some bad implementations of Zip extractors
+ // attempt to extract files without making sure the parents exist.
+ private void addDirectory(ZipOutputStream zos, Set<String> seen, String path) throws IOException {
+ if (!seen.add(path))
+ return;
+
+ int idx = path.lastIndexOf('/');
+ if (idx != -1)
+ addDirectory(zos, seen, path.substring(0, idx));
+
+ logger.accept(" " + path + '/');
+ ZipEntry dir = new ZipEntry(path + '/');
+ dir.setTime(Entry.STABLE_TIMESTAMP);
+ zos.putNextEntry(dir);
+ zos.closeEntry();
+ }
+
+ private Entry processEntry(final Entry start) {
+ Entry entry = start;
+ for (Transformer transformer : RenamerImpl.this.transformers) {
+ entry = entry.process(transformer);
+ if (entry == null)
+ return null;
+ }
+ return entry;
+ }
+
+ private int compare(Entry o1, Entry o2) {
+ // In order for JarInputStream to work, MANIFEST has to be the first entry, so make it first!
+ if (MANIFEST_NAME.equals(o1.getName()))
+ return MANIFEST_NAME.equals(o2.getName()) ? 0 : -1;
+ if (MANIFEST_NAME.equals(o2.getName()))
+ return MANIFEST_NAME.equals(o1.getName()) ? 0 : 1;
+ return o1.getName().compareTo(o2.getName());
+ }
+
+ @Override
+ public void close() throws IOException {
+ this.sortedClassProvider.close();
+ }
+}
diff --git a/src/main/java/org/bukkit/craftbukkit/CraftServer.java b/src/main/java/org/bukkit/craftbukkit/CraftServer.java
index 45160b93a24dc74f6368441e2a4fe659ceaf5bf5..6573e72d041714ccc2bf0e3c8734bc212caf534e 100644
--- a/src/main/java/org/bukkit/craftbukkit/CraftServer.java
+++ b/src/main/java/org/bukkit/craftbukkit/CraftServer.java
@@ -966,6 +966,7 @@ public final class CraftServer implements Server {
this.enablePlugins(PluginLoadOrder.STARTUP);
this.enablePlugins(PluginLoadOrder.POSTWORLD);
this.getPluginManager().callEvent(new ServerLoadEvent(ServerLoadEvent.LoadType.RELOAD));
+ if (io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper != null) io.papermc.paper.plugin.PluginInitializerManager.instance().pluginRemapper.pluginsEnabled(); // Paper - Remap plugins
}
@Override
diff --git a/src/main/java/org/bukkit/craftbukkit/util/CraftMagicNumbers.java b/src/main/java/org/bukkit/craftbukkit/util/CraftMagicNumbers.java
index adf28b8e77d42267ce41713e031ee316366202e8..29698be6e4990769cbe1c00088dfb9cfc73d966a 100644
--- a/src/main/java/org/bukkit/craftbukkit/util/CraftMagicNumbers.java
+++ b/src/main/java/org/bukkit/craftbukkit/util/CraftMagicNumbers.java
@@ -72,6 +72,7 @@ import org.bukkit.potion.PotionType;
@SuppressWarnings("deprecation")
public final class CraftMagicNumbers implements UnsafeValues {
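+ // Paper - setting -Dpaper.disable-plugin-rewriting=true skips the legacy CraftLegacy initialization and the plugin bytecode processing in processClass below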
+ public static final boolean DISABLE_PLUGIN_REWRITING = Boolean.getBoolean("paper.disable-plugin-rewriting");
public static final UnsafeValues INSTANCE = new CraftMagicNumbers();
private CraftMagicNumbers() {}
@@ -356,7 +357,7 @@ public final class CraftMagicNumbers implements UnsafeValues {
throw new InvalidPluginException("Plugin API version " + pdf.getAPIVersion() + " is lower than the minimum allowed version. Please update or replace it.");
}
- if (toCheck.isOlderThan(ApiVersion.FLATTENING)) {
+ if (!DISABLE_PLUGIN_REWRITING && toCheck.isOlderThan(ApiVersion.FLATTENING)) { // Paper
CraftLegacy.init();
}
@@ -371,6 +372,7 @@ public final class CraftMagicNumbers implements UnsafeValues {
@Override
public byte[] processClass(PluginDescriptionFile pdf, String path, byte[] clazz) {
+ if (DISABLE_PLUGIN_REWRITING) return clazz; // Paper
try {
clazz = Commodore.convert(clazz, pdf.getName(), ApiVersion.getOrCreateVersion(pdf.getAPIVersion()));
} catch (Exception ex) {