diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..022b8414 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,5 @@ +# +# https://help.github.com/articles/dealing-with-line-endings/ +# +# These are explicitly windows files and should use crlf +*.bat text eol=crlf diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bc3588aa..332d98b9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,29 +16,36 @@ env: jobs: build-gradle: - runs-on: ubuntu-latest + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + + runs-on: ${{ matrix.os }} + steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: validate gradle wrapper - uses: gradle/wrapper-validation-action@v1 + - name: Validate Gradle wrapper + uses: gradle/actions/wrapper-validation@v3 - - uses: actions/setup-java@v3 + - name: Setup Java + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: '21' - name: Setup Gradle - uses: gradle/gradle-build-action@v2 + uses: gradle/actions/setup-gradle@v3 - - name: Run build with Gradle wrapper - run: ./gradlew build + - name: Run build and tests with Gradle wrapper + run: ./gradlew test build - name: Upload all artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 + if: matrix.os == 'ubuntu-latest' with: name: jars path: | diff --git a/build.gradle b/build.gradle index 4c98616c..5d15ff22 100644 --- a/build.gradle +++ b/build.gradle @@ -2,6 +2,9 @@ plugins { id 'org.cadixdev.licenser' version '0.6.1' apply false } +import org.gradle.api.tasks.testing.logging.TestExceptionFormat +import org.gradle.api.tasks.testing.logging.TestLogEvent + allprojects { group = 'me.lucko' version = '1.10-SNAPSHOT' @@ -20,11 +23,21 @@ subprojects { pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.' } - tasks.withType(JavaCompile) { + tasks.withType(JavaCompile).configureEach { options.encoding = 'UTF-8' options.release = 8 } + tasks.withType(Test).configureEach { + testLogging { + events = [TestLogEvent.PASSED, TestLogEvent.FAILED, TestLogEvent.SKIPPED] + exceptionFormat = TestExceptionFormat.FULL + showExceptions = true + showCauses = true + showStackTraces = true + } + } + processResources { duplicatesStrategy = DuplicatesStrategy.INCLUDE } @@ -35,6 +48,16 @@ subprojects { } repositories { + // Fix issue with lwjgl-freetype not being found on macOS / ForgeGradle issue + // + // Could not resolve all files for configuration ':_compileJava_1'. + // Could not find lwjgl-freetype-3.3.3-natives-macos-patch.jar (org.lwjgl:lwjgl-freetype:3.3.3). 
+ maven { + url "https://libraries.minecraft.net" + content { + includeModule("org.lwjgl", "lwjgl-freetype") + } + } mavenCentral() maven { url "https://oss.sonatype.org/content/repositories/snapshots/" } maven { url "https://repo.lucko.me/" } diff --git a/gradle.properties b/gradle.properties index 1c3cd0da..afa7c6cc 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,5 +1,5 @@ org.gradle.jvmargs=-Xmx2G -org.gradle.parallel=true +org.gradle.parallel=false # thanks, forge org.gradle.daemon=false \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index e6441136..2c352119 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index b82aa23a..09523c0e 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index 1aa94a42..f5feea6d 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -55,7 +57,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -84,7 +86,8 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s +' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum diff --git a/gradlew.bat b/gradlew.bat index 25da30db..9d21a218 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,6 +13,8 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. 
@rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem @if "%DEBUG%"=="" @echo off @rem ########################################################################## diff --git a/settings.gradle b/settings.gradle index 6c3c251a..278ba8d8 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,5 +1,6 @@ pluginManagement { repositories { + gradlePluginPortal() maven { name = 'Fabric' url = 'https://maven.fabricmc.net/' @@ -12,7 +13,6 @@ pluginManagement { name = 'NeoForge' url = 'https://maven.neoforged.net/releases' } - gradlePluginPortal() } } @@ -25,6 +25,7 @@ include ( 'spark-api', 'spark-common', 'spark-bukkit', + 'spark-paper', 'spark-bungeecord', 'spark-velocity', 'spark-velocity4', diff --git a/spark-api/build.gradle b/spark-api/build.gradle index 0fbe9e1a..19f70c1d 100644 --- a/spark-api/build.gradle +++ b/spark-api/build.gradle @@ -5,8 +5,8 @@ plugins { version = '0.1-SNAPSHOT' dependencies { - compileOnly 'org.checkerframework:checker-qual:3.8.0' - compileOnly 'org.jetbrains:annotations:20.1.0' + compileOnly 'org.checkerframework:checker-qual:3.44.0' + compileOnly 'org.jetbrains:annotations:24.1.0' } license { diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index 653eb536..a5f20d6b 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -26,10 +26,10 @@ package me.lucko.spark.api; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.placeholder.PlaceholderResolver; import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.api.statistic.types.DoubleStatistic; import me.lucko.spark.api.statistic.types.GenericStatistic; - import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import org.jetbrains.annotations.Unmodifiable; @@ -84,4 +84,12 @@ public interface Spark { */ @NonNull @Unmodifiable Map gc(); + /** + * Gets a placeholder resolver. + * + * @return a placeholder resolver + */ + @NonNull + PlaceholderResolver placeholders(); + } diff --git a/spark-api/src/main/java/me/lucko/spark/api/placeholder/PlaceholderResolver.java b/spark-api/src/main/java/me/lucko/spark/api/placeholder/PlaceholderResolver.java new file mode 100644 index 00000000..20834cb7 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/placeholder/PlaceholderResolver.java @@ -0,0 +1,54 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.placeholder; + +import org.checkerframework.checker.nullness.qual.NonNull; +import org.checkerframework.checker.nullness.qual.Nullable; + +/** + * Resolves spark placeholders. + * + *

See spark docs for more info.

+ */ +public interface PlaceholderResolver { + + /** + * Resolves the given placeholder to a legacy formatted string. + * + * @param placeholder the placeholder to resolve + * @return the resolved placeholder + */ + @Nullable String resolveLegacyFormatting(@NonNull String placeholder); + + /** + * Resolves the given placeholder to a text component serialised to json. + * + * @param placeholder the placeholder to resolve + * @return the resolved placeholder + */ + @Nullable String resolveComponentJson(@NonNull String placeholder); + +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/statistic/types/DoubleStatistic.java b/spark-api/src/main/java/me/lucko/spark/api/statistic/types/DoubleStatistic.java index 50c484e3..e5486e17 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/statistic/types/DoubleStatistic.java +++ b/spark-api/src/main/java/me/lucko/spark/api/statistic/types/DoubleStatistic.java @@ -27,7 +27,6 @@ import me.lucko.spark.api.statistic.Statistic; import me.lucko.spark.api.statistic.StatisticWindow; - import org.checkerframework.checker.nullness.qual.NonNull; /** diff --git a/spark-api/src/main/java/me/lucko/spark/api/statistic/types/GenericStatistic.java b/spark-api/src/main/java/me/lucko/spark/api/statistic/types/GenericStatistic.java index c91801b6..a2d298da 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/statistic/types/GenericStatistic.java +++ b/spark-api/src/main/java/me/lucko/spark/api/statistic/types/GenericStatistic.java @@ -27,7 +27,6 @@ import me.lucko.spark.api.statistic.Statistic; import me.lucko.spark.api.statistic.StatisticWindow; - import org.checkerframework.checker.nullness.qual.NonNull; /** diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle index 7d78db88..da46dc30 100644 --- a/spark-bukkit/build.gradle +++ b/spark-bukkit/build.gradle @@ -1,11 +1,15 @@ plugins { - id 'com.github.johnrengelman.shadow' version '8.1.1' + id 'com.gradleup.shadow' version '8.3.0' +} + +java { + disableAutoTargetJvm() } dependencies { implementation project(':spark-common') - implementation 'net.kyori:adventure-platform-bukkit:4.3.1' - compileOnly 'com.destroystokyo.paper:paper-api:1.16.4-R0.1-SNAPSHOT' + implementation 'net.kyori:adventure-platform-bukkit:4.3.3' + compileOnly 'io.papermc.paper:paper-api:1.21.1-R0.1-SNAPSHOT' // placeholders compileOnly 'me.clip:placeholderapi:2.10.3' @@ -33,6 +37,7 @@ shadowJar { relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' @@ -43,6 +48,8 @@ shadowJar { exclude 'module-info.class' exclude 'META-INF/maven/**' exclude 'META-INF/proguard/**' + exclude '**/*.proto' + exclude '**/*.proto.bin' } artifacts { diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java index d3b2aa16..e505907c 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java @@ -21,7 +21,6 @@ package me.lucko.spark.bukkit; import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import org.bukkit.plugin.java.JavaPlugin; import java.lang.reflect.Field; diff --git 
a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitCommandSender.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitCommandSender.java index df6f6581..11f7e9ec 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitCommandSender.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitCommandSender.java @@ -21,11 +21,9 @@ package me.lucko.spark.bukkit; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.audience.Audience; import net.kyori.adventure.platform.bukkit.BukkitAudiences; import net.kyori.adventure.text.Component; - import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java index 2bf17acc..60522f4b 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java @@ -21,7 +21,6 @@ package me.lucko.spark.bukkit; import me.lucko.spark.common.platform.PlatformInfo; - import org.bukkit.Server; import java.lang.reflect.Field; @@ -44,6 +43,11 @@ public String getName() { return "Bukkit"; } + @Override + public String getBrand() { + return this.server.getName(); + } + @Override public String getVersion() { return this.server.getVersion(); diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java index 2cf58cf0..988ff9f0 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java @@ -21,9 +21,7 @@ package me.lucko.spark.bukkit; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import org.bukkit.Server; import org.bukkit.entity.Player; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java index 15ec1295..e5181406 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -20,6 +20,7 @@ package me.lucko.spark.bukkit; +import co.aikar.timings.TimingsManager; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.gson.Gson; @@ -27,19 +28,15 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonSerializer; - import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; - import org.bukkit.Bukkit; import org.bukkit.World; import org.bukkit.configuration.MemorySection; import org.bukkit.configuration.file.YamlConfiguration; -import co.aikar.timings.TimingsManager; - import java.io.BufferedReader; import java.io.IOException; import java.nio.file.Files; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index 87490ea2..afab5554 100644 --- 
a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -34,9 +34,7 @@ import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; - import net.kyori.adventure.platform.bukkit.BukkitAudiences; - import org.bukkit.ChatColor; import org.bukkit.command.Command; import org.bukkit.command.CommandExecutor; @@ -190,7 +188,8 @@ public Collection getKnownSources() { Arrays.asList(getServer().getPluginManager().getPlugins()), Plugin::getName, plugin -> plugin.getDescription().getVersion(), - plugin -> String.join(", ", plugin.getDescription().getAuthors()) + plugin -> String.join(", ", plugin.getDescription().getAuthors()), + plugin -> plugin.getDescription().getDescription() ); } diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickHook.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickHook.java index 6547971b..eb49e0bf 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickHook.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickHook.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; - import org.bukkit.plugin.Plugin; import org.bukkit.scheduler.BukkitTask; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java index babb0bc7..303cc3ee 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java @@ -23,8 +23,8 @@ import me.lucko.spark.common.platform.world.AbstractChunkInfo; import me.lucko.spark.common.platform.world.CountMap; import me.lucko.spark.common.platform.world.WorldInfoProvider; - import org.bukkit.Chunk; +import org.bukkit.GameRule; import org.bukkit.Server; import org.bukkit.World; import org.bukkit.block.BlockState; @@ -32,10 +32,16 @@ import org.bukkit.entity.EntityType; import java.util.ArrayList; +import java.util.Collection; import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.stream.Collectors; public class BukkitWorldInfoProvider implements WorldInfoProvider { private static final boolean SUPPORTS_PAPER_COUNT_METHODS; + private static final boolean SUPPORTS_GAMERULES; + private static final boolean SUPPORTS_DATAPACKS; static { boolean supportsPaperCountMethods = false; @@ -47,7 +53,27 @@ public class BukkitWorldInfoProvider implements WorldInfoProvider { } catch (Exception e) { // ignored } + + boolean supportsGameRules = false; + try { + Class.forName("org.bukkit.GameRule"); + World.class.getMethod("getGameRuleValue", GameRule.class); + supportsGameRules = true; + } catch (Exception e) { + // ignored + } + + boolean supportsDataPacks = false; + try { + Server.class.getMethod("getDataPackManager"); + supportsDataPacks = true; + } catch (Exception e) { + // ignored + } + SUPPORTS_PAPER_COUNT_METHODS = supportsPaperCountMethods; + SUPPORTS_GAMERULES = supportsGameRules; + SUPPORTS_DATAPACKS = supportsDataPacks; } private final Server server; @@ -111,6 +137,53 @@ public ChunksResult pollChunks() { return data; } + @Override + public GameRulesResult pollGameRules() { + if (!SUPPORTS_GAMERULES) { + return null; + } + + GameRulesResult data = new GameRulesResult(); + + boolean addDefaults = 
true; // add defaults in the first iteration + for (World world : this.server.getWorlds()) { + for (String gameRule : world.getGameRules()) { + GameRule ruleObj = GameRule.getByName(gameRule); + if (ruleObj == null) { + continue; + } + + if (addDefaults) { + Object defaultValue = world.getGameRuleDefault(ruleObj); + data.putDefault(gameRule, Objects.toString(defaultValue)); + } + + Object value = world.getGameRuleValue(ruleObj); + data.put(gameRule, world.getName(), Objects.toString(value)); + } + + addDefaults = false; + } + + return data; + } + + @SuppressWarnings("removal") + @Override + public Collection pollDataPacks() { + if (!SUPPORTS_DATAPACKS) { + return null; + } + + return this.server.getDataPackManager().getDataPacks().stream() + .map(pack -> new DataPackInfo( + pack.getTitle(), + pack.getDescription(), + pack.getSource().name().toLowerCase(Locale.ROOT).replace("_", "") + )) + .collect(Collectors.toList()); + } + static final class BukkitChunkInfo extends AbstractChunkInfo { private final CountMap entityCounts; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java index e604321f..9240d9b2 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java @@ -21,7 +21,6 @@ package me.lucko.spark.bukkit; import com.google.common.base.Preconditions; - import org.bukkit.Bukkit; import org.bukkit.command.Command; import org.bukkit.command.CommandExecutor; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickHook.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickHook.java index 43d5a247..d245623d 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickHook.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickHook.java @@ -21,10 +21,8 @@ package me.lucko.spark.bukkit; import com.destroystokyo.paper.event.server.ServerTickStartEvent; - import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; - import org.bukkit.event.EventHandler; import org.bukkit.event.HandlerList; import org.bukkit.event.Listener; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickReporter.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickReporter.java index f4a1ee92..587196e6 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickReporter.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/PaperTickReporter.java @@ -21,10 +21,8 @@ package me.lucko.spark.bukkit; import com.destroystokyo.paper.event.server.ServerTickEndEvent; - import me.lucko.spark.common.tick.AbstractTickReporter; import me.lucko.spark.common.tick.TickReporter; - import org.bukkit.event.EventHandler; import org.bukkit.event.HandlerList; import org.bukkit.event.Listener; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java index 7fa6e02c..11dadc99 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java @@ -20,13 +20,12 @@ package me.lucko.spark.bukkit.placeholder; -import me.lucko.spark.bukkit.BukkitSparkPlugin; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.util.SparkPlaceholder; - import 
be.maximvdw.placeholderapi.PlaceholderAPI; import be.maximvdw.placeholderapi.PlaceholderReplaceEvent; import be.maximvdw.placeholderapi.PlaceholderReplacer; +import me.lucko.spark.bukkit.BukkitSparkPlugin; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.util.SparkPlaceholder; public class SparkMVdWPlaceholders implements PlaceholderReplacer { private final SparkPlatform platform; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java index b3919ddd..7c599a75 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java @@ -24,7 +24,6 @@ import me.lucko.spark.bukkit.BukkitSparkPlugin; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.util.SparkPlaceholder; - import org.bukkit.OfflinePlayer; import org.bukkit.entity.Player; diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle index bc0be7a7..8438d5e6 100644 --- a/spark-bungeecord/build.gradle +++ b/spark-bungeecord/build.gradle @@ -1,10 +1,10 @@ plugins { - id 'com.github.johnrengelman.shadow' version '8.1.1' + id 'com.gradleup.shadow' version '8.3.0' } dependencies { implementation project(':spark-common') - implementation 'net.kyori:adventure-platform-bungeecord:4.3.1' + implementation 'net.kyori:adventure-platform-bungeecord:4.3.3' compileOnly 'net.md-5:bungeecord-api:1.16-R0.4' } @@ -23,6 +23,7 @@ shadowJar { relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' @@ -33,6 +34,8 @@ shadowJar { exclude 'module-info.class' exclude 'META-INF/maven/**' exclude 'META-INF/proguard/**' + exclude '**/*.proto' + exclude '**/*.proto.bin' } artifacts { diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java index 2024d54f..b0665fd6 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java @@ -21,7 +21,6 @@ package me.lucko.spark.bungeecord; import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import net.md_5.bungee.api.plugin.PluginDescription; import java.lang.reflect.Field; diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordCommandSender.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordCommandSender.java index 917111a6..51d1a7d5 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordCommandSender.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordCommandSender.java @@ -21,7 +21,6 @@ package me.lucko.spark.bungeecord; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.audience.Audience; import net.kyori.adventure.platform.bungeecord.BungeeAudiences; import net.kyori.adventure.text.Component; diff --git 
a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java index fc5c5886..df8cc8dc 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java @@ -21,7 +21,6 @@ package me.lucko.spark.bungeecord; import me.lucko.spark.common.platform.PlatformInfo; - import net.md_5.bungee.api.ProxyServer; public class BungeeCordPlatformInfo implements PlatformInfo { @@ -41,6 +40,11 @@ public String getName() { return "BungeeCord"; } + @Override + public String getBrand() { + return this.proxy.getName(); + } + @Override public String getVersion() { return this.proxy.getVersion(); diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java index 37955a30..fcb3e31a 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java @@ -21,9 +21,7 @@ package me.lucko.spark.bungeecord; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import net.md_5.bungee.api.ProxyServer; import net.md_5.bungee.api.connection.ProxiedPlayer; diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java index 71beddb1..85d72c77 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java @@ -26,7 +26,6 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.source.SourceMetadata; - import net.kyori.adventure.platform.bungeecord.BungeeAudiences; import net.md_5.bungee.api.CommandSender; import net.md_5.bungee.api.plugin.Command; @@ -99,7 +98,8 @@ public Collection getKnownSources() { getProxy().getPluginManager().getPlugins(), plugin -> plugin.getDescription().getName(), plugin -> plugin.getDescription().getVersion(), - plugin -> plugin.getDescription().getAuthor() + plugin -> plugin.getDescription().getAuthor(), + plugin -> plugin.getDescription().getDescription() ); } @@ -130,5 +130,10 @@ public void execute(CommandSender sender, String[] args) { public Iterable onTabComplete(CommandSender sender, String[] args) { return this.plugin.platform.tabCompleteCommand(new BungeeCordCommandSender(sender, this.plugin.audienceFactory), args); } + + @Override + public boolean hasPermission(CommandSender sender) { + return this.plugin.platform.hasPermissionForAnyCommand(new BungeeCordCommandSender(sender, this.plugin.audienceFactory)); + } } } diff --git a/spark-common/build.gradle b/spark-common/build.gradle index 20805ad5..5bc4bdaf 100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -1,15 +1,10 @@ -import org.cadixdev.gradle.licenser.LicenseExtension - plugins { - id 'com.google.protobuf' version '0.9.1' + id 'com.google.protobuf' version '0.9.4' } license { exclude '**/sampler/async/jfr/**' -} - -extensions.configure(LicenseExtension.class) { - it.exclude { + exclude { 
it.file.toString().startsWith(buildDir.toString()) } } @@ -17,25 +12,25 @@ extensions.configure(LicenseExtension.class) { dependencies { api project(':spark-api') implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.3' - implementation 'org.ow2.asm:asm:9.1' - implementation 'net.bytebuddy:byte-buddy-agent:1.11.0' - implementation 'com.google.protobuf:protobuf-javalite:3.21.11' + implementation 'org.ow2.asm:asm:9.7' + implementation 'net.bytebuddy:byte-buddy-agent:1.14.17' + implementation 'com.google.protobuf:protobuf-javalite:4.27.1' implementation('me.lucko:bytesocks-java-client:1.0-20230828.145440-5') { exclude(module: 'slf4j-api') } - api('net.kyori:adventure-api:4.13.1') { + api('net.kyori:adventure-api:4.17.0') { exclude(module: 'adventure-bom') exclude(module: 'checker-qual') exclude(module: 'annotations') } - api('net.kyori:adventure-text-serializer-gson:4.13.1') { + api('net.kyori:adventure-text-serializer-gson:4.17.0') { exclude(module: 'adventure-bom') exclude(module: 'adventure-api') exclude(module: 'gson') } - api('net.kyori:adventure-text-serializer-legacy:4.13.1') { + api('net.kyori:adventure-text-serializer-legacy:4.17.0') { exclude(module: 'adventure-bom') exclude(module: 'adventure-api') } @@ -44,12 +39,28 @@ dependencies { } compileOnly 'com.google.code.gson:gson:2.7' compileOnly 'com.google.guava:guava:19.0' - compileOnly 'org.checkerframework:checker-qual:3.8.0' + compileOnly 'org.checkerframework:checker-qual:3.44.0' + + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.11.0-M2' + testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.11.0-M2' + testImplementation 'org.junit.jupiter:junit-jupiter-params:5.11.0-M2' + // testImplementation "org.testcontainers:junit-jupiter:1.19.8" + // testImplementation 'org.mockito:mockito-core:5.12.0' + // testImplementation 'org.mockito:mockito-junit-jupiter:5.12.0' + + testImplementation 'com.google.code.gson:gson:2.7' + testImplementation 'com.google.guava:guava:19.0' + testImplementation 'org.checkerframework:checker-qual:3.44.0' + + testImplementation('net.kyori:adventure-text-serializer-ansi:4.17.0') { + exclude(module: 'adventure-bom') + exclude(module: 'adventure-api') + } } protobuf { protoc { - artifact = 'com.google.protobuf:protoc:3.21.11' + artifact = 'com.google.protobuf:protoc:4.27.1' } generateProtoTasks { all().each { task -> @@ -61,3 +72,8 @@ protobuf { } } } + +test { + useJUnitPlatform {} + systemProperty('net.kyori.ansi.colorLevel', 'indexed16') +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index ec1ea9a5..bedc605f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -45,6 +45,7 @@ import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.SparkTickStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.PlatformStatisticsProvider; import me.lucko.spark.common.sampler.BackgroundSamplerManager; import me.lucko.spark.common.sampler.SamplerContainer; @@ -52,8 +53,12 @@ import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.common.util.BytebinClient; -import me.lucko.spark.common.util.Configuration; +import me.lucko.spark.common.util.SparkStaticLogger; import 
me.lucko.spark.common.util.TemporaryFiles; +import me.lucko.spark.common.util.classfinder.ClassFinder; +import me.lucko.spark.common.util.config.Configuration; +import me.lucko.spark.common.util.config.FileConfiguration; +import me.lucko.spark.common.util.config.RuntimeConfiguration; import me.lucko.spark.common.ws.TrustedKeyStore; import net.kyori.adventure.text.Component; import net.kyori.adventure.text.event.ClickEvent; @@ -69,11 +74,14 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; import java.util.stream.Collectors; +import java.util.stream.Stream; import static net.kyori.adventure.text.Component.space; import static net.kyori.adventure.text.Component.text; @@ -116,9 +124,17 @@ public class SparkPlatform { public SparkPlatform(SparkPlugin plugin) { this.plugin = plugin; + SparkStaticLogger.setLogger(plugin::log); - this.temporaryFiles = new TemporaryFiles(this.plugin.getPluginDirectory().resolve("tmp")); - this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json")); + this.temporaryFiles = new TemporaryFiles(this.plugin.getPlatformInfo().getType() == PlatformInfo.Type.CLIENT + ? this.plugin.getPluginDirectory().resolve("tmp") + : this.plugin.getPluginDirectory().resolve("tmp-client") + ); + this.configuration = Configuration.combining( + RuntimeConfiguration.SYSTEM_PROPERTIES, + RuntimeConfiguration.ENVIRONMENT_VARIABLES, + new FileConfiguration(this.plugin.getPluginDirectory().resolve("config.json")) + ); this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/"); String bytebinUrl = this.configuration.getString("bytebinUrl", "https://spark-usercontent.lucko.me/"); @@ -283,6 +299,10 @@ public ClassSourceLookup createClassSourceLookup() { return this.plugin.createClassSourceLookup(); } + public ClassFinder createClassFinder() { + return this.plugin.createClassFinder(); + } + public TickStatistics getTickStatistics() { return this.tickStatistics; } @@ -299,6 +319,10 @@ public long getServerNormalOperationStartTime() { return this.serverNormalOperationStartTime; } + public boolean hasEnabled() { + return this.enabled.get(); + } + public Path resolveSaveFile(String prefix, String extension) { Path pluginFolder = this.plugin.getPluginDirectory(); try { @@ -319,11 +343,21 @@ private List getAvailableCommands(CommandSender sender) { .collect(Collectors.toList()); } + public Set getAllSparkPermissions() { + return Stream.concat( + Stream.of("spark"), + this.commands.stream() + .map(Command::primaryAlias) + .map(alias -> "spark." 
+ alias) + ).collect(Collectors.toSet()); + } + public boolean hasPermissionForAnyCommand(CommandSender sender) { return !getAvailableCommands(sender).isEmpty(); } - public void executeCommand(CommandSender sender, String[] args) { + public CompletableFuture executeCommand(CommandSender sender, String[] args) { + CompletableFuture future = new CompletableFuture<>(); AtomicReference executorThread = new AtomicReference<>(); AtomicReference timeoutThread = new AtomicReference<>(); AtomicBoolean completed = new AtomicBoolean(false); @@ -334,9 +368,11 @@ public void executeCommand(CommandSender sender, String[] args) { this.commandExecuteLock.lock(); try { executeCommand0(sender, args); + future.complete(null); } catch (Exception e) { this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command"); e.printStackTrace(); + future.completeExceptionally(e); } finally { this.commandExecuteLock.unlock(); executorThread.set(null); @@ -355,6 +391,10 @@ public void executeCommand(CommandSender sender, String[] args) { int warningIntervalSeconds = 5; try { + if (completed.get()) { + return; + } + for (int i = 1; i <= 3; i++) { try { Thread.sleep(warningIntervalSeconds * 1000); @@ -386,6 +426,8 @@ public void executeCommand(CommandSender sender, String[] args) { timeoutThread.set(null); } }); + + return future; } private void executeCommand0(CommandSender sender, String[] args) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index a3bdceb2..bf745dfd 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -33,6 +33,9 @@ import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; +import me.lucko.spark.common.util.classfinder.ClassFinder; +import me.lucko.spark.common.util.classfinder.FallbackClassFinder; +import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder; import java.nio.file.Path; import java.util.Collection; @@ -149,6 +152,18 @@ default ClassSourceLookup createClassSourceLookup() { return ClassSourceLookup.NO_OP; } + /** + * Creates a class finder for the platform. + * + * @return the class finder + */ + default ClassFinder createClassFinder() { + return ClassFinder.combining( + new InstrumentationClassFinder(this), + FallbackClassFinder.INSTANCE + ); + } + /** * Gets a list of known sources (plugins/mods) on the platform. 
* diff --git a/spark-common/src/main/java/me/lucko/spark/common/activitylog/Activity.java b/spark-common/src/main/java/me/lucko/spark/common/activitylog/Activity.java index 8b1b558d..c66888aa 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/activitylog/Activity.java +++ b/spark-common/src/main/java/me/lucko/spark/common/activitylog/Activity.java @@ -23,7 +23,6 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; - import me.lucko.spark.common.command.sender.CommandSender; import java.util.concurrent.TimeUnit; @@ -39,12 +38,12 @@ public final class Activity { private final String dataType; private final String dataValue; - public static Activity urlActivity(CommandSender user, long time, String type, String url) { - return new Activity(user.toData(), time, type, DATA_TYPE_URL, url); + public static Activity urlActivity(CommandSender.Data user, long time, String type, String url) { + return new Activity(user, time, type, DATA_TYPE_URL, url); } - public static Activity fileActivity(CommandSender user, long time, String type, String filePath) { - return new Activity(user.toData(), time, type, DATA_TYPE_FILE, filePath); + public static Activity fileActivity(CommandSender.Data user, long time, String type, String filePath) { + return new Activity(user, time, type, DATA_TYPE_FILE, filePath); } private Activity(CommandSender.Data user, long time, String type, String dataType, String dataValue) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java b/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java index 49a6ccb0..e7610132 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java @@ -24,7 +24,6 @@ import me.lucko.spark.api.statistic.StatisticWindow; import me.lucko.spark.api.statistic.types.DoubleStatistic; import me.lucko.spark.api.statistic.types.GenericStatistic; - import org.checkerframework.checker.nullness.qual.NonNull; import java.lang.reflect.Array; diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java index fc14c67f..3dfdab28 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java @@ -22,7 +22,6 @@ import me.lucko.spark.api.gc.GarbageCollector; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; - import org.checkerframework.checker.nullness.qual.NonNull; public class GarbageCollectorInfo implements GarbageCollector { diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index 9e4eee48..81b4f70f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -21,10 +21,10 @@ package me.lucko.spark.common.api; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.api.Spark; import me.lucko.spark.api.SparkProvider; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.placeholder.PlaceholderResolver; import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.api.statistic.types.DoubleStatistic; import 
me.lucko.spark.api.statistic.types.GenericStatistic; @@ -32,7 +32,7 @@ import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; - +import me.lucko.spark.common.util.SparkPlaceholder; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; @@ -174,6 +174,21 @@ public DoubleAverageInfo poll(@NonNull MillisPerTick window) { return ImmutableMap.copyOf(map); } + @Override + public @NonNull PlaceholderResolver placeholders() { + return new PlaceholderResolver() { + @Override + public @Nullable String resolveLegacyFormatting(@NonNull String placeholder) { + return SparkPlaceholder.resolveFormattingCode(SparkApi.this.platform, placeholder); + } + + @Override + public @Nullable String resolveComponentJson(@NonNull String placeholder) { + return SparkPlaceholder.resolveComponentJson(SparkApi.this.platform, placeholder); + } + }; + } + public static void register(Spark spark) { try { SINGLETON_SET_METHOD.invoke(null, spark); diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java index c6871a95..3e87fa6e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java @@ -21,10 +21,8 @@ package me.lucko.spark.common.command; import com.google.common.collect.ImmutableList; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; - import net.kyori.adventure.text.Component; import java.util.Collections; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java index d1481bd6..3a894cad 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java @@ -22,11 +22,11 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.JoinConfiguration; import net.kyori.adventure.text.TextComponent; +import java.lang.ref.WeakReference; import java.util.Set; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -49,20 +49,22 @@ public class CommandResponseHandler { .build(); private final SparkPlatform platform; - private final CommandSender sender; + private final CommandSender.Data senderData; + private final WeakReference sender; private String commandPrimaryAlias; public CommandResponseHandler(SparkPlatform platform, CommandSender sender) { this.platform = platform; - this.sender = sender; + this.senderData = sender.toData(); + this.sender = new WeakReference<>(sender); } public void setCommandPrimaryAlias(String commandPrimaryAlias) { this.commandPrimaryAlias = commandPrimaryAlias; } - public CommandSender sender() { - return this.sender; + public CommandSender.Data senderData() { + return this.senderData; } public void allSenders(Consumer action) { @@ -74,17 +76,24 @@ public void allSenders(Consumer action) { .filter(s -> s.hasPermission("spark") || s.hasPermission("spark." 
+ this.commandPrimaryAlias)) .collect(Collectors.toSet()); - senders.add(this.sender); + CommandSender sender = this.sender.get(); + if (sender != null) { + senders.add(sender); + } + senders.forEach(action); } public void reply(Component message) { - this.sender.sendMessage(message); + CommandSender sender = this.sender.get(); + if (sender != null) { + sender.sendMessage(message); + } } public void reply(Iterable message) { Component joinedMsg = Component.join(JoinConfiguration.separator(Component.newline()), message); - this.sender.sendMessage(joinedMsg); + reply(joinedMsg); } public void broadcast(Component message) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java index 6252ac79..c72038bb 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java @@ -24,7 +24,6 @@ import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; import me.lucko.spark.common.command.tabcomplete.TabCompleter; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.TextComponent; import net.kyori.adventure.text.event.ClickEvent; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java index a2da0a06..1d0c226c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.command.modules; import com.sun.management.GarbageCollectionNotificationInfo; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; @@ -29,7 +28,6 @@ import me.lucko.spark.common.monitor.memory.GarbageCollectionMonitor; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.util.FormatUtil; - import net.kyori.adventure.text.Component; import java.lang.management.MemoryUsage; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java index 16eadc8a..fbf79ef6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java @@ -21,6 +21,7 @@ package me.lucko.spark.common.command.modules; import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.activitylog.Activity; import me.lucko.spark.common.command.Arguments; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; @@ -35,11 +36,15 @@ import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PingSummary; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.SparkMetadata; +import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.util.FormatUtil; +import me.lucko.spark.common.util.MediaTypes; import me.lucko.spark.common.util.RollingAverage; import me.lucko.spark.common.util.StatisticFormatter; - +import 
me.lucko.spark.proto.SparkProtos; import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.event.ClickEvent; import java.lang.management.ManagementFactory; import java.lang.management.MemoryMXBean; @@ -85,10 +90,11 @@ public void registerCommands(Consumer consumer) { consumer.accept(Command.builder() .aliases("healthreport", "health", "ht") + .argumentUsage("upload", null) .argumentUsage("memory", null) .argumentUsage("network", null) .executor(HealthModule::healthReport) - .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory", "--network")) + .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--upload", "--memory", "--network")) .build() ); } @@ -186,6 +192,12 @@ private static void ping(SparkPlatform platform, CommandSender sender, CommandRe private static void healthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { resp.replyPrefixed(text("Generating server health report...")); + + if (arguments.boolFlag("upload")) { + uploadHealthReport(platform, sender, resp, arguments); + return; + } + List report = new LinkedList<>(); report.add(empty()); @@ -210,6 +222,37 @@ private static void healthReport(SparkPlatform platform, CommandSender sender, C resp.reply(report); } + private static void uploadHealthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + SparkProtos.HealthMetadata.Builder metadata = SparkProtos.HealthMetadata.newBuilder(); + SparkMetadata.gather(platform, sender.toData(), platform.getStartupGcStatistics()).writeTo(metadata); + + SparkProtos.HealthData.Builder data = SparkProtos.HealthData.newBuilder() + .setMetadata(metadata); + + Sampler activeSampler = platform.getSamplerContainer().getActiveSampler(); + if (activeSampler != null) { + data.putAllTimeWindowStatistics(activeSampler.exportWindowStatistics()); + } + + try { + String key = platform.getBytebinClient().postContent(data.build(), MediaTypes.SPARK_HEALTH_MEDIA_TYPE).key(); + String url = platform.getViewerUrl() + key; + + resp.broadcastPrefixed(text("Health report:", GOLD)); + resp.broadcast(text() + .content(url) + .color(GRAY) + .clickEvent(ClickEvent.openUrl(url)) + .build() + ); + + platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Health report", url)); + } catch (Exception e) { + resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED)); + e.printStackTrace(); + } + } + private static void addTickStats(List report, TickStatistics tickStatistics) { report.add(text() .append(text(">", DARK_GRAY, BOLD)) diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java index 6ac3b2f1..54f7df1f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java @@ -34,7 +34,6 @@ import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.common.util.MediaTypes; import me.lucko.spark.proto.SparkHeapProtos; - import net.kyori.adventure.text.event.ClickEvent; import java.io.IOException; @@ -90,7 +89,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co return; } - SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender); + 
SparkHeapProtos.HeapData output = heapDump.toProto(platform, resp.senderData()); boolean saveToFile = false; if (arguments.boolFlag("save-to-file")) { @@ -108,7 +107,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co .build() ); - platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url)); + platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump summary", url)); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED)); e.printStackTrace(); @@ -129,7 +128,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co ); resp.broadcastPrefixed(text("You can read the heap dump summary file using the viewer web-app - " + platform.getViewerUrl(), GRAY)); - platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump summary", file.toString())); + platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump summary", file.toString())); } catch (IOException e) { resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED)); e.printStackTrace(); @@ -164,7 +163,7 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma .append(text(file.toString(), GRAY)) .build() ); - platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString())); + platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump", file.toString())); Compression compressionMethod = null; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index ad0557d8..cc7d56b0 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -37,12 +37,11 @@ import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.async.AsyncSampler; -import me.lucko.spark.common.sampler.node.MergeMode; +import me.lucko.spark.common.sampler.java.MergeStrategy; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.common.util.MediaTypes; -import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.common.ws.ViewerSocket; import me.lucko.spark.proto.SparkSamplerProtos; import net.kyori.adventure.text.Component; @@ -59,7 +58,9 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; +import java.util.function.Supplier; +import static net.kyori.adventure.text.Component.empty; import static net.kyori.adventure.text.Component.space; import static net.kyori.adventure.text.Component.text; import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY; @@ -207,7 +208,7 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command } } - ThreadGrouper threadGrouper; + Supplier threadGrouper; if (arguments.boolFlag("combine-all")) { threadGrouper = ThreadGrouper.AS_ONE; } else if 
(arguments.boolFlag("not-combined")) { @@ -435,7 +436,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S .build() ); - platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url)); + platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Profiler", url)); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst uploading the results. Attempting to save to disk instead.", RED)); e.printStackTrace(); @@ -452,7 +453,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S resp.broadcastPrefixed(text("Data has been written to: " + file)); resp.broadcastPrefixed(text("You can view the profile file using the web app @ " + platform.getViewerUrl(), GRAY)); - platform.getActivityLog().addToLog(Activity.fileActivity(resp.sender(), System.currentTimeMillis(), "Profiler", file.toString())); + platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Profiler", file.toString())); } catch (IOException e) { resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED)); e.printStackTrace(); @@ -479,7 +480,22 @@ private void handleOpen(SparkPlatform platform, BytesocksClient bytesocksClient, .build() ); - platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler (live)", url)); + String cmd = "/" + platform.getPlugin().getCommandName() + " profiler stop"; + resp.broadcast(empty()); + resp.broadcast(text() + .append(text("(NOTE: this link is temporary and will expire after a short period of time. " + + "If you need a link to share with other people (e.g. in a bug report), please use ", GRAY)) + .append(text() + .content(cmd) + .color(WHITE) + .clickEvent(ClickEvent.runCommand(cmd)) + .build() + ) + .append(text(" instead.)", GRAY)) + .build() + ); + + platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Profiler (live)", url)); } catch (Exception e) { resp.replyPrefixed(text("An error occurred whilst opening the live profiler.", RED)); e.printStackTrace(); @@ -488,14 +504,9 @@ private void handleOpen(SparkPlatform platform, BytesocksClient bytesocksClient, private Sampler.ExportProps getExportProps(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { return new Sampler.ExportProps() - .creator(resp.sender().toData()) + .creator(resp.senderData()) .comment(Iterables.getFirst(arguments.stringFlag("comment"), null)) - .mergeMode(() -> { - MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); - return arguments.boolFlag("separate-parent-calls") - ? MergeMode.separateParentCalls(methodDisambiguator) - : MergeMode.sameMethod(methodDisambiguator); - }) + .mergeStrategy(arguments.boolFlag("separate-parent-calls") ? 
MergeStrategy.SEPARATE_PARENT_CALLS : MergeStrategy.SAME_METHOD) .classSourceLookup(() -> ClassSourceLookup.create(platform)); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java index f5f4fce5..85dfaf1d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java @@ -28,7 +28,6 @@ import me.lucko.spark.common.monitor.tick.ReportPredicate; import me.lucko.spark.common.monitor.tick.TickMonitor; import me.lucko.spark.common.tick.TickHook; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.format.NamedTextColor; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java index bae5ddfa..4dc53eb6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java @@ -23,9 +23,7 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; - import me.lucko.spark.proto.SparkProtos.CommandSenderMetadata; - import net.kyori.adventure.text.Component; import java.util.UUID; @@ -36,6 +34,10 @@ public interface CommandSender { UUID getUniqueId(); + default boolean isPlayer() { + return getUniqueId() != null; + } + void sendMessage(Component message); boolean hasPermission(String permission); diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java index 955bafe9..707adbb3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java @@ -20,15 +20,14 @@ package me.lucko.spark.common.heapdump; +import javax.management.JMX; +import javax.management.MBeanServer; +import javax.management.ObjectName; import java.io.IOException; import java.lang.management.ManagementFactory; import java.lang.reflect.Method; import java.nio.file.Path; -import javax.management.JMX; -import javax.management.MBeanServer; -import javax.management.ObjectName; - /** * Utility for creating .hprof memory heap snapshots. 
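// The CommandSender hunk above adds a default isPlayer() that treats a non-null
// unique id as "this sender is a player" (the console sender has no UUID), which
// the sender-data refactor elsewhere in this diff relies on. A minimal
// self-contained sketch of that pattern; SenderSketch and IsPlayerDemo are
// invented here for illustration and are not part of the diff:
import java.util.UUID;

interface SenderSketch {
    UUID getUniqueId();

    // mirrors the new default method: players have a UUID, the console does not
    default boolean isPlayer() {
        return getUniqueId() != null;
    }
}

class IsPlayerDemo {
    public static void main(String[] args) {
        SenderSketch console = () -> null;
        SenderSketch player = () -> UUID.randomUUID();
        System.out.println(console.isPlayer()); // false
        System.out.println(player.isPlayer());  // true
    }
}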
*/ diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java index eaedd312..a5e7039d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java @@ -22,12 +22,16 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; +import me.lucko.spark.common.platform.SparkMetadata; import me.lucko.spark.proto.SparkHeapProtos.HeapData; import me.lucko.spark.proto.SparkHeapProtos.HeapEntry; import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata; - +import org.jetbrains.annotations.VisibleForTesting; import org.objectweb.asm.Type; +import javax.management.JMX; +import javax.management.MBeanServer; +import javax.management.ObjectName; import java.lang.management.ManagementFactory; import java.util.Arrays; import java.util.List; @@ -36,10 +40,6 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; -import javax.management.JMX; -import javax.management.MBeanServer; -import javax.management.ObjectName; - /** * Represents a "heap dump summary" from the VM. * @@ -125,21 +125,14 @@ private HeapDumpSummary(List entries) { this.entries = entries; } - public HeapData toProto(SparkPlatform platform, CommandSender creator) { - HeapMetadata.Builder metadata = HeapMetadata.newBuilder() - .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) - .setCreator(creator.toData().toProto()); - try { - metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null, true)); - } catch (Exception e) { - e.printStackTrace(); - } + @VisibleForTesting + List getEntries() { + return this.entries; + } - try { - metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics()); - } catch (Exception e) { - e.printStackTrace(); - } + public HeapData toProto(SparkPlatform platform, CommandSender.Data creator) { + HeapMetadata.Builder metadata = HeapMetadata.newBuilder(); + SparkMetadata.gather(platform, creator, platform.getStartupGcStatistics()).writeTo(metadata); HeapData.Builder proto = HeapData.newBuilder(); proto.setMetadata(metadata); @@ -188,6 +181,16 @@ public HeapEntry toProto() { .setType(this.type) .build(); } + + @Override + public String toString() { + return "Entry{" + + "order=" + this.order + + ", instances=" + this.instances + + ", bytes=" + this.bytes + + ", type='" + this.type + '\'' + + '}'; + } } public interface DiagnosticCommandMXBean { diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java new file mode 100644 index 00000000..c279f311 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java @@ -0,0 +1,67 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.monitor; + +import org.checkerframework.checker.nullness.qual.NonNull; + +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +/** + * Utility for reading from sysctl on macOS systems. + */ +public enum MacosSysctl { + + SYSCTL("sysctl", "-a"),; + + private static final boolean SUPPORTED = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "").equals("macosx"); + + private final String[] cmdArgs; + + MacosSysctl(String... cmdArgs) { + this.cmdArgs = cmdArgs; + } + + public @NonNull List read() { + if (SUPPORTED) { + ProcessBuilder process = new ProcessBuilder(this.cmdArgs).redirectErrorStream(true); + try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.start().getInputStream()))) { + List lines = new ArrayList<>(); + + String line; + while ((line = buf.readLine()) != null) { + lines.add(line); + } + + return lines; + } catch (Exception e) { + // ignore + } + } + + return Collections.emptyList(); + } +} + diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java index 9954bd51..07875cc3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java @@ -21,6 +21,7 @@ package me.lucko.spark.common.monitor.cpu; import me.lucko.spark.common.monitor.LinuxProc; +import me.lucko.spark.common.monitor.MacosSysctl; import me.lucko.spark.common.monitor.WindowsWmic; import java.util.regex.Pattern; @@ -52,6 +53,12 @@ public static String queryCpuModel() { } } + for (String line : MacosSysctl.SYSCTL.read()) { + if (line.startsWith("machdep.cpu.brand_string:")) { + return line.substring("machdep.cpu.brand_string:".length()).trim(); + } + } + return ""; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java index b4ab8315..987af7b5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java @@ -23,13 +23,12 @@ import me.lucko.spark.common.monitor.MonitoringExecutor; import me.lucko.spark.common.util.RollingAverage; -import java.lang.management.ManagementFactory; -import java.math.BigDecimal; -import java.util.concurrent.TimeUnit; - import javax.management.JMX; import javax.management.MBeanServer; import javax.management.ObjectName; +import java.lang.management.ManagementFactory; +import java.math.BigDecimal; +import java.util.concurrent.TimeUnit; /** * Exposes and monitors the system/process CPU usage. 
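// Usage sketch for the two hunks above: on macOS, MacosSysctl.SYSCTL.read()
// shells out to `sysctl -a`, and CpuInfo scans the output for the CPU brand
// string. A standalone approximation of the combined flow (not the spark
// classes themselves; only prints something when run on macOS):
import java.io.BufferedReader;
import java.io.InputStreamReader;

class SysctlCpuModelDemo {
    public static void main(String[] args) throws Exception {
        ProcessBuilder process = new ProcessBuilder("sysctl", "-a").redirectErrorStream(true);
        try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.start().getInputStream()))) {
            String line;
            while ((line = buf.readLine()) != null) {
                // same prefix match as the CpuInfo hunk above
                if (line.startsWith("machdep.cpu.brand_string:")) {
                    System.out.println(line.substring("machdep.cpu.brand_string:".length()).trim());
                    return;
                }
            }
        }
    }
}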
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java index 9bff1e24..d9abf9b6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java @@ -22,16 +22,15 @@ import com.sun.management.GarbageCollectionNotificationInfo; -import java.lang.management.GarbageCollectorMXBean; -import java.lang.management.ManagementFactory; -import java.util.ArrayList; -import java.util.List; - import javax.management.ListenerNotFoundException; import javax.management.Notification; import javax.management.NotificationEmitter; import javax.management.NotificationListener; import javax.management.openmbean.CompositeData; +import java.lang.management.GarbageCollectorMXBean; +import java.lang.management.ManagementFactory; +import java.util.ArrayList; +import java.util.List; /** * Monitoring process for garbage collections. diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java index 8f63f712..b260d7ee 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java @@ -22,13 +22,12 @@ import me.lucko.spark.common.monitor.LinuxProc; -import java.lang.management.ManagementFactory; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - import javax.management.JMX; import javax.management.MBeanServer; import javax.management.ObjectName; +import java.lang.management.ManagementFactory; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Utility to query information about system memory usage. 
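// The next hunk makes NetworkInterfaceInfo#read package-visible for tests. As a
// reading aid, this sketch applies the same PROC_NET_DEV_PATTERN to one sample
// /proc/net/dev line; printing instead of building the real value objects is
// this sketch's own simplification:
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class ProcNetDevDemo {
    private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s*(\\w+):([\\d\\s]+)$");

    public static void main(String[] args) {
        String line = "    lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0";
        Matcher matcher = PROC_NET_DEV_PATTERN.matcher(line);
        if (matcher.matches()) {
            String iface = matcher.group(1);
            String[] counters = matcher.group(2).trim().split("\\s+");
            // per the header comment in the hunk: first 8 columns are receive
            // stats, the last 8 are transmit stats, bytes first in each group
            System.out.println(iface + ": rxBytes=" + counters[0] + ", txBytes=" + counters[8]);
        }
    }
}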
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java index 332077a4..01bd3a7b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java @@ -21,10 +21,9 @@ package me.lucko.spark.common.monitor.net; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.LinuxProc; - import org.checkerframework.checker.nullness.qual.NonNull; +import org.jetbrains.annotations.VisibleForTesting; import java.util.Arrays; import java.util.Collections; @@ -202,7 +201,8 @@ public NetworkInterfaceInfo subtract(NetworkInterfaceInfo other) { private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s*(\\w+):([\\d\\s]+)$"); - private static @NonNull Map read(List output) { + @VisibleForTesting + static @NonNull Map read(List output) { // Inter-| Receive | Transmit // face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed // lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0 diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java index 49fcbe1b..803a1836 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.monitor.MonitoringExecutor; import me.lucko.spark.common.util.RollingAverage; - import org.checkerframework.checker.nullness.qual.Nullable; import java.math.BigDecimal; diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java index 944fa83e..f9b41f31 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java @@ -21,11 +21,9 @@ package me.lucko.spark.common.monitor.tick; import com.sun.management.GarbageCollectionNotificationInfo; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.monitor.memory.GarbageCollectionMonitor; import me.lucko.spark.common.tick.TickHook; - import net.kyori.adventure.text.Component; import java.text.DecimalFormat; diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java index 082389d5..1d71d535 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java @@ -30,6 +30,8 @@ public interface PlatformInfo { String getName(); + String getBrand(); + String getVersion(); String getMinecraftVersion(); @@ -40,7 +42,7 @@ default int getSparkVersion() { } default Data toData() { - return new Data(getType(), getName(), getVersion(), getMinecraftVersion(), getSparkVersion()); + return new Data(getType(), getName(), getBrand(), getVersion(), getMinecraftVersion(), getSparkVersion()); } enum Type { @@ -62,13 +64,15 @@ public PlatformMetadata.Type toProto() { final class Data { private final Type type; private final String name; + private final String brand; private 
final String version; private final String minecraftVersion; private final int sparkVersion; - public Data(Type type, String name, String version, String minecraftVersion, int sparkVersion) { + public Data(Type type, String name, String brand, String version, String minecraftVersion, int sparkVersion) { this.type = type; this.name = name; + this.brand = brand; this.version = version; this.minecraftVersion = minecraftVersion; this.sparkVersion = sparkVersion; @@ -82,6 +86,10 @@ public String getName() { return this.name; } + public String getBrand() { + return this.brand; + } + public String getVersion() { return this.version; } @@ -98,6 +106,7 @@ public PlatformMetadata toProto() { PlatformMetadata.Builder proto = PlatformMetadata.newBuilder() .setType(this.type.toProto()) .setName(this.name) + .setBrand(this.brand) .setVersion(this.version) .setSparkVersion(this.sparkVersion); diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index b0987c9a..e8401fcc 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -22,6 +22,7 @@ import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.cpu.CpuInfo; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.disk.DiskUsage; @@ -40,9 +41,16 @@ import me.lucko.spark.proto.SparkProtos.WorldStatistics; import java.lang.management.ManagementFactory; +import java.lang.management.MemoryPoolMXBean; +import java.lang.management.MemoryType; import java.lang.management.MemoryUsage; import java.lang.management.RuntimeMXBean; +import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.logging.Level; +import java.util.regex.Pattern; +import java.util.stream.Collectors; public class PlatformStatisticsProvider { private final SparkPlatform platform; @@ -55,6 +63,8 @@ public SystemStatistics getSystemStatistics() { RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean(); OperatingSystemInfo osInfo = OperatingSystemInfo.poll(); + String vmArgs = String.join(" ", runtimeBean.getInputArguments()); + SystemStatistics.Builder builder = SystemStatistics.newBuilder() .setCpu(SystemStatistics.Cpu.newBuilder() .setThreads(Runtime.getRuntime().availableProcessors()) @@ -99,7 +109,13 @@ public SystemStatistics getSystemStatistics() { .setVendor(System.getProperty("java.vendor", "unknown")) .setVersion(System.getProperty("java.version", "unknown")) .setVendorVersion(System.getProperty("java.vendor.version", "unknown")) - .setVmArgs(String.join(" ", runtimeBean.getInputArguments())) + .setVmArgs(VmArgRedactor.replace(vmArgs)) + .build() + ) + .setJvm(SystemStatistics.Jvm.newBuilder() + .setName(System.getProperty("java.vm.name", "unknown")) + .setVendor(System.getProperty("java.vm.vendor", "unknown")) + .setVersion(System.getProperty("java.vm.version", "unknown")) .build() ); @@ -130,18 +146,35 @@ public SystemStatistics getSystemStatistics() { return builder.build(); } - public PlatformStatistics getPlatformStatistics(Map startingGcStatistics, boolean includeWorld) { + public PlatformStatistics getPlatformStatistics(Map startingGcStatistics, boolean includeWorldStatistics) { 
PlatformStatistics.Builder builder = PlatformStatistics.newBuilder(); - MemoryUsage memoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage(); - builder.setMemory(PlatformStatistics.Memory.newBuilder() - .setHeap(PlatformStatistics.Memory.MemoryPool.newBuilder() - .setUsed(memoryUsage.getUsed()) - .setTotal(memoryUsage.getCommitted()) - .build() - ) - .build() - ); + PlatformStatistics.Memory.Builder memory = PlatformStatistics.Memory.newBuilder() + .setHeap(memoryUsageProto(ManagementFactory.getMemoryMXBean().getHeapMemoryUsage())) + .setNonHeap(memoryUsageProto(ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage())); + + List memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans(); + for (MemoryPoolMXBean memoryPool : memoryPoolMXBeans) { + if (memoryPool.getType() != MemoryType.HEAP) { + continue; + } + + MemoryUsage usage = memoryPool.getUsage(); + MemoryUsage collectionUsage = memoryPool.getCollectionUsage(); + + if (usage.getMax() == -1) { + usage = new MemoryUsage(usage.getInit(), usage.getUsed(), usage.getCommitted(), usage.getCommitted()); + } + + memory.addPools(PlatformStatistics.Memory.MemoryPool.newBuilder() + .setName(memoryPool.getName()) + .setUsage(memoryUsageProto(usage)) + .setCollectionUsage(memoryUsageProto(collectionUsage)) + .build() + ); + } + + builder.setMemory(memory.build()); long uptime = System.currentTimeMillis() - this.platform.getServerNormalOperationStartTime(); builder.setUptime(uptime); @@ -183,13 +216,29 @@ public PlatformStatistics getPlatformStatistics(Map senders = this.platform.getPlugin().getCommandSenders().collect(Collectors.toList()); + PlatformInfo.Type platformType = this.platform.getPlugin().getPlatformInfo().getType(); if (platformType != PlatformInfo.Type.CLIENT) { - long playerCount = this.platform.getPlugin().getCommandSenders().count() - 1; // includes console + long playerCount = senders.size() - 1; // includes console builder.setPlayerCount(playerCount); } - if (includeWorld) { + UUID anyOnlinePlayerUniqueId = senders.stream() + .filter(CommandSender::isPlayer) + .map(CommandSender::getUniqueId) + .filter(uniqueId -> uniqueId.version() == 4 || uniqueId.version() == 3) + .findAny() + .orElse(null); + + builder.setOnlineMode(anyOnlinePlayerUniqueId == null + ? PlatformStatistics.OnlineMode.UNKNOWN + : anyOnlinePlayerUniqueId.version() == 4 + ? PlatformStatistics.OnlineMode.ONLINE + : PlatformStatistics.OnlineMode.OFFLINE + ); + + if (includeWorldStatistics) { try { WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider( new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider()) @@ -199,7 +248,7 @@ public PlatformStatistics getPlatformStatistics(Map"); + input = MACOS_USERNAME.matcher(input).replaceAll("/Users/"); + input = LINUX_USERNAME.matcher(input).replaceAll("/home/"); + return input; + } + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java new file mode 100644 index 00000000..4b68921e --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java @@ -0,0 +1,154 @@ +/* + * This file is part of spark. 
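// The getPlatformStatistics hunk above infers the server's online-mode from the
// UUID version of any connected player: Mojang-authenticated accounts carry
// random (version 4) UUIDs, while offline-mode servers derive name-based
// (version 3) UUIDs from "OfflinePlayer:<name>". A sketch of that check in
// isolation ("Notch" is just an example name; spark first filters senders to
// UUID versions 3 and 4 before classifying):
import java.nio.charset.StandardCharsets;
import java.util.UUID;

class OnlineModeDemo {
    public static void main(String[] args) {
        UUID online = UUID.randomUUID(); // version 4
        UUID offline = UUID.nameUUIDFromBytes("OfflinePlayer:Notch".getBytes(StandardCharsets.UTF_8)); // version 3
        System.out.println(classify(online));  // ONLINE
        System.out.println(classify(offline)); // OFFLINE
    }

    static String classify(UUID uniqueId) {
        if (uniqueId == null) return "UNKNOWN";
        return uniqueId.version() == 4 ? "ONLINE" : "OFFLINE";
    }
}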
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.sender.CommandSender; +import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.sampler.source.SourceMetadata; +import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata; +import me.lucko.spark.proto.SparkProtos.HealthMetadata; +import me.lucko.spark.proto.SparkProtos.PlatformMetadata; +import me.lucko.spark.proto.SparkProtos.PlatformStatistics; +import me.lucko.spark.proto.SparkProtos.SystemStatistics; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; + +import java.util.Collection; +import java.util.Locale; +import java.util.Map; +import java.util.logging.Level; + +public class SparkMetadata { + + public static SparkMetadata gather(SparkPlatform platform, CommandSender.Data creator, Map initialGcStats) { + PlatformMetadata platformMetadata = platform.getPlugin().getPlatformInfo().toData().toProto(); + + PlatformStatistics platformStatistics = null; + try { + platformStatistics = platform.getStatisticsProvider().getPlatformStatistics(initialGcStats, true); + } catch (Exception e) { + platform.getPlugin().log(Level.WARNING, "Failed to gather platform statistics - " + e); + } + + SystemStatistics systemStatistics = null; + try { + systemStatistics = platform.getStatisticsProvider().getSystemStatistics(); + } catch (Exception e) { + platform.getPlugin().log(Level.WARNING, "Failed to gather system statistics - " + e); + } + + long generatedTime = System.currentTimeMillis(); + + Map serverConfigurations = null; + try { + ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider(); + if (serverConfigProvider != null) { + serverConfigurations = serverConfigProvider.export(); + } + } catch (Exception e) { + platform.getPlugin().log(Level.WARNING, "Failed to gather server configurations - " + e); + } + + Collection sources = platform.getPlugin().getKnownSources(); + + Map extraPlatformMetadata = null; + try { + MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider(); + if (extraMetadataProvider != null) { + extraPlatformMetadata = extraMetadataProvider.export(); + } + } catch (Exception e) { + platform.getPlugin().log(Level.WARNING, "Failed to gather extra platform metadata - " + e); + } + + return new SparkMetadata(creator, platformMetadata, platformStatistics, systemStatistics, generatedTime, serverConfigurations, sources, extraPlatformMetadata); + } + + private final CommandSender.Data creator; + private final PlatformMetadata platformMetadata; + private final PlatformStatistics platformStatistics; + private final SystemStatistics systemStatistics; + private final long 
generatedTime; + private final Map serverConfigurations; + private final Collection sources; + private final Map extraPlatformMetadata; + + public SparkMetadata(CommandSender.Data creator, PlatformMetadata platformMetadata, PlatformStatistics platformStatistics, SystemStatistics systemStatistics, long generatedTime, Map serverConfigurations, Collection sources, Map extraPlatformMetadata) { + this.creator = creator; + this.platformMetadata = platformMetadata; + this.platformStatistics = platformStatistics; + this.systemStatistics = systemStatistics; + this.generatedTime = generatedTime; + this.serverConfigurations = serverConfigurations; + this.sources = sources; + this.extraPlatformMetadata = extraPlatformMetadata; + } + + @SuppressWarnings("DuplicatedCode") + public void writeTo(HealthMetadata.Builder builder) { + if (this.creator != null) builder.setCreator(this.creator.toProto()); + if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata); + if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics); + if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics); + builder.setGeneratedTime(this.generatedTime); + if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations); + if (this.sources != null) { + for (SourceMetadata source : this.sources) { + builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); + } + } + if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata); + } + + @SuppressWarnings("DuplicatedCode") + public void writeTo(SamplerMetadata.Builder builder) { + if (this.creator != null) builder.setCreator(this.creator.toProto()); + if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata); + if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics); + if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics); + builder.setEndTime(this.generatedTime); + if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations); + if (this.sources != null) { + for (SourceMetadata source : this.sources) { + builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); + } + } + if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata); + } + + @SuppressWarnings("DuplicatedCode") + public void writeTo(HeapMetadata.Builder builder) { + if (this.creator != null) builder.setCreator(this.creator.toProto()); + if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata); + if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics); + if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics); + builder.setGeneratedTime(this.generatedTime); + if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations); + if (this.sources != null) { + for (SourceMetadata source : this.sources) { + builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); + } + } + if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java 
index 485f2158..6503df9f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableMap; import com.google.gson.JsonElement; - import me.lucko.spark.common.platform.MetadataProvider; import java.util.Arrays; diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java index 82cddeff..707097a8 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java @@ -23,6 +23,7 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; +import java.util.Collection; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -80,6 +81,14 @@ public CompletableFuture>> return async(WorldInfoProvider::pollChunks); } + public CompletableFuture pollGameRules() { + return async(WorldInfoProvider::pollGameRules); + } + + public CompletableFuture> pollDataPacks() { + return async(WorldInfoProvider::pollDataPacks); + } + public WorldInfoProvider.CountsResult getCounts() { return get(pollCounts()); } @@ -87,4 +96,12 @@ public WorldInfoProvider.CountsResult getCounts() { public WorldInfoProvider.ChunksResult> getChunks() { return get(pollChunks()); } + + public WorldInfoProvider.GameRulesResult getGameRules() { + return get(pollGameRules()); + } + + public Collection getDataPacks() { + return get(pollDataPacks()); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java index 7fb581db..457f8c9b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.platform.world; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -39,6 +40,16 @@ public CountsResult pollCounts() { public ChunksResult> pollChunks() { return null; } + + @Override + public GameRulesResult pollGameRules() { + return null; + } + + @Override + public Collection pollDataPacks() { + return null; + } }; /** @@ -55,6 +66,20 @@ public ChunksResult> pollChunks() { */ ChunksResult> pollChunks(); + /** + * Polls for game rules. + * + * @return the game rules + */ + GameRulesResult pollGameRules(); + + /** + * Polls for data packs. 
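// The AsyncWorldInfoProvider hunk above follows one shape for every poll method,
// including the new pollGameRules/pollDataPacks pair: wrap the provider call in
// a CompletableFuture and pair it with a blocking getter. A generic sketch of
// that wrap-and-wait shape; the five-second timeout, the null fallback and the
// helper names are assumptions of this sketch, not taken from the diff:
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

class AsyncPollSketch {
    static <T> CompletableFuture<T> async(Supplier<T> poll) {
        return CompletableFuture.supplyAsync(poll);
    }

    static <T> T get(CompletableFuture<T> future) {
        try {
            return future.get(5, TimeUnit.SECONDS);
        } catch (Exception e) {
            return null; // the real class handles timeouts more carefully
        }
    }

    public static void main(String[] args) {
        System.out.println(get(async(() -> "game rules polled")));
    }
}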
+ * + * @return the data packs + */ + Collection pollDataPacks(); + default boolean mustCallSync() { return true; } @@ -101,4 +126,61 @@ public int chunks() { } } + final class GameRulesResult { + private final Map rules = new HashMap<>(); + + private GameRule rule(String name) { + return this.rules.computeIfAbsent(name, k -> new GameRule()); + } + + public void put(String gameRuleName, String worldName, String value) { + rule(gameRuleName).worldValues.put(worldName, value); + } + + public void putDefault(String gameRuleName, String value) { + rule(gameRuleName).defaultValue = value; + } + + public Map getRules() { + return this.rules; + } + + public static final class GameRule { + Map worldValues = new HashMap<>(); + String defaultValue = null; + + public String getDefaultValue() { + return this.defaultValue; + } + + public Map getWorldValues() { + return this.worldValues; + } + } + } + + final class DataPackInfo { + private final String name; + private final String description; + private final String source; + + public DataPackInfo(String name, String description, String source) { + this.name = name; + this.description = description; + this.source = source; + } + + public String name() { + return this.name; + } + + public String description() { + return this.description; + } + + public String source() { + return this.source; + } + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java index 7e63222c..58183a09 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java @@ -23,6 +23,7 @@ import me.lucko.spark.proto.SparkProtos.WorldStatistics; import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -38,7 +39,7 @@ public WorldStatisticsProvider(AsyncWorldInfoProvider provider) { } public WorldStatistics getWorldStatistics() { - WorldInfoProvider.ChunksResult> result = provider.getChunks(); + WorldInfoProvider.ChunksResult> result = this.provider.getChunks(); if (result == null) { return null; } @@ -70,6 +71,26 @@ public WorldStatistics getWorldStatistics() { stats.setTotalEntities(combinedTotal.get()); combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get())); + WorldInfoProvider.GameRulesResult gameRules = this.provider.getGameRules(); + if (gameRules != null) { + gameRules.getRules().forEach((ruleName, rule) -> stats.addGameRules(WorldStatistics.GameRule.newBuilder() + .setName(ruleName) + .setDefaultValue(rule.getDefaultValue()) + .putAllWorldValues(rule.getWorldValues()) + .build() + )); + } + + Collection dataPacks = this.provider.getDataPacks(); + if (dataPacks != null) { + dataPacks.forEach(dataPack -> stats.addDataPacks(WorldStatistics.DataPack.newBuilder() + .setName(dataPack.name()) + .setDescription(dataPack.description()) + .setSource(dataPack.source()) + .build() + )); + } + return stats.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 445702ef..aecdc713 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -23,15 +23,14 
@@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; -import me.lucko.spark.common.platform.MetadataProvider; -import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.SparkMetadata; import me.lucko.spark.common.sampler.aggregator.DataAggregator; -import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.node.exporter.NodeExporter; import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; +import me.lucko.spark.common.util.classfinder.ClassFinder; import me.lucko.spark.common.ws.ViewerSocket; import me.lucko.spark.proto.SparkProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; @@ -40,10 +39,11 @@ import java.util.Collection; import java.util.Comparator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Function; +import java.util.function.Supplier; /** * Base implementation class for {@link Sampler}s. @@ -120,6 +120,11 @@ protected Map getInitialGcStats() { return this.initialGcStats; } + @Override + public Map exportWindowStatistics() { + return this.windowStatisticsCollector.export(); + } + @Override public void start() { this.startTime = System.currentTimeMillis(); @@ -174,15 +179,15 @@ protected void sendStatisticsToSocket() { protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender.Data creator, String comment, DataAggregator dataAggregator) { SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder() + .setSamplerEngine(getType().asProto()) .setSamplerMode(getMode().asProto()) - .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) - .setCreator(creator.toProto()) .setStartTime(this.startTime) - .setEndTime(System.currentTimeMillis()) .setInterval(this.interval) .setThreadDumper(this.threadDumper.getMetadata()) .setDataAggregator(dataAggregator.getMetadata()); + SparkMetadata.gather(platform, creator, getInitialGcStats()).writeTo(metadata); + if (comment != null) { metadata.setComment(comment); } @@ -192,49 +197,14 @@ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform pla metadata.setNumberOfTicks(totalTicks); } - try { - metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats(), true)); - } catch (Exception e) { - e.printStackTrace(); - } - - try { - metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics()); - } catch (Exception e) { - e.printStackTrace(); - } - - try { - ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider(); - if (serverConfigProvider != null) { - metadata.putAllServerConfigurations(serverConfigProvider.export()); - } - } catch (Exception e) { - e.printStackTrace(); - } - - try { - MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider(); - if (extraMetadataProvider != null) { - metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export()); - } - } catch (Exception e) { - e.printStackTrace(); - } - 
- Collection knownSources = platform.getPlugin().getKnownSources(); - for (SourceMetadata source : knownSources) { - metadata.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); - } - proto.setMetadata(metadata); } - protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Function nodeExporterFunction, ClassSourceLookup classSourceLookup, Supplier classFinderSupplier) { List data = dataAggregator.exportData(); data.sort(Comparator.comparing(ThreadNode::getThreadLabel)); - ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup); + ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup, classFinderSupplier); ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(getMode().valueTransformer(), data); int[] timeWindows = timeEncoder.getKeys(); @@ -245,8 +215,10 @@ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAg this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows); proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export()); + NodeExporter exporter = nodeExporterFunction.apply(timeEncoder); + for (ThreadNode entry : data) { - proto.addThreads(entry.toProto(mergeMode, timeEncoder)); + proto.addThreads(exporter.export(entry)); classSourceVisitor.visit(entry); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java index 4e9ca9e0..18893049 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java @@ -22,8 +22,9 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.util.Configuration; +import me.lucko.spark.common.util.config.Configuration; +import java.util.function.Supplier; import java.util.logging.Level; public class BackgroundSamplerManager { @@ -103,7 +104,7 @@ public boolean restartBackgroundSampler() { private void startSampler() { boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java"); - ThreadGrouper threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool")); + Supplier threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool")); ThreadDumper threadDumper = ThreadDumper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_DUMPER, "default")); if (threadDumper == null) { threadDumper = this.platform.getPlugin().getDefaultThreadDumper(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 844ab0bf..71ab0399 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -22,13 +22,15 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; -import me.lucko.spark.common.sampler.node.MergeMode; +import me.lucko.spark.common.sampler.java.MergeStrategy; import 
me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.ws.ViewerSocket; +import me.lucko.spark.proto.SparkProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SocketChannelInfo; import java.util.Collection; +import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.function.Supplier; @@ -82,6 +84,13 @@ public interface Sampler { */ boolean isRunningInBackground(); + /** + * Gets the sampler type. + * + * @return the sampler type + */ + SamplerType getType(); + /** * Gets the sampler mode. * @@ -96,13 +105,20 @@ public interface Sampler { */ CompletableFuture getFuture(); + /** + * Exports the current set of window statistics. + * + * @return the window statistics + */ + Map exportWindowStatistics(); + // Methods used to export the sampler data to the web viewer. SamplerData toProto(SparkPlatform platform, ExportProps exportProps); final class ExportProps { private CommandSender.Data creator; private String comment; - private Supplier mergeMode; + private MergeStrategy mergeStrategy; private Supplier classSourceLookup; private SocketChannelInfo channelInfo; @@ -117,8 +133,8 @@ public String comment() { return this.comment; } - public Supplier mergeMode() { - return this.mergeMode; + public MergeStrategy mergeStrategy() { + return this.mergeStrategy; } public Supplier classSourceLookup() { @@ -139,8 +155,8 @@ public ExportProps comment(String comment) { return this; } - public ExportProps mergeMode(Supplier mergeMode) { - this.mergeMode = mergeMode; + public ExportProps mergeStrategy(MergeStrategy mergeStrategy) { + this.mergeStrategy = mergeStrategy; return this; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index b6895ce8..3046d92d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -28,6 +28,7 @@ import me.lucko.spark.common.tick.TickHook; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; /** * Builds {@link Sampler} instances. 
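// Sampler.ExportProps (changed above) keeps its fluent setter-returns-this
// style while swapping the Supplier of MergeMode for a plain MergeStrategy enum
// value. A toy reduction of that pattern with a stand-in enum, mirroring the
// call shape used by SamplerModule#getExportProps earlier in this diff:
class ExportPropsSketch {
    enum MergeStrategy { SAME_METHOD, SEPARATE_PARENT_CALLS }

    private String comment;
    private MergeStrategy mergeStrategy;

    ExportPropsSketch comment(String comment) {
        this.comment = comment;
        return this;
    }

    ExportPropsSketch mergeStrategy(MergeStrategy mergeStrategy) {
        this.mergeStrategy = mergeStrategy;
        return this;
    }

    public static void main(String[] args) {
        boolean separateParentCalls = true; // stands in for arguments.boolFlag("separate-parent-calls")
        ExportPropsSketch props = new ExportPropsSketch()
                .comment("example")
                .mergeStrategy(separateParentCalls ? MergeStrategy.SEPARATE_PARENT_CALLS : MergeStrategy.SAME_METHOD);
        System.out.println(props.comment + " / " + props.mergeStrategy);
    }
}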
@@ -44,7 +45,7 @@ public class SamplerBuilder { private long autoEndTime = -1; private boolean background = false; private ThreadDumper threadDumper = ThreadDumper.ALL; - private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; + private Supplier threadGrouper = ThreadGrouper.BY_NAME; private int ticksOver = -1; private TickHook tickHook = null; @@ -80,7 +81,7 @@ public SamplerBuilder threadDumper(ThreadDumper threadDumper) { return this; } - public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) { + public SamplerBuilder threadGrouper(Supplier threadGrouper) { this.threadGrouper = threadGrouper; return this; } @@ -131,7 +132,7 @@ public Sampler start(SparkPlatform platform) throws UnsupportedOperationExceptio this.samplingInterval ); - SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background); + SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper.get(), this.autoEndTime, this.background); Sampler sampler; if (this.mode == SamplerMode.ALLOCATION) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerType.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerType.java new file mode 100644 index 00000000..aad4b23b --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerType.java @@ -0,0 +1,47 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
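// The SamplerBuilder and ThreadGrouper hunks above stop sharing singleton
// groupers: callers now pass a Supplier of ThreadGrouper and the builder calls
// .get() at start(), so every profile gets a fresh grouper with its own mutable
// caches. A sketch of why that matters, using a hypothetical stateful grouper
// invented for this example:
import java.util.HashSet;
import java.util.Set;
import java.util.function.Supplier;

class GrouperSupplierDemo {
    static class CountingGrouper {
        final Set<String> seen = new HashSet<>();
        void observe(String thread) { seen.add(thread); }
    }

    public static void main(String[] args) {
        Supplier<CountingGrouper> factory = CountingGrouper::new;

        CountingGrouper first = factory.get();
        first.observe("Worker-1");

        CountingGrouper second = factory.get(); // fresh state for a new profile
        System.out.println(first.seen.size());  // 1
        System.out.println(second.seen.size()); // 0
    }
}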
+ */ + +package me.lucko.spark.common.sampler; + +import me.lucko.spark.common.sampler.async.AsyncSampler; +import me.lucko.spark.common.sampler.java.JavaSampler; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; + +public enum SamplerType { + JAVA(JavaSampler.class, SamplerMetadata.SamplerEngine.JAVA), + ASYNC(AsyncSampler.class, SamplerMetadata.SamplerEngine.ASYNC); + + private final Class expectedClass; + private final SamplerMetadata.SamplerEngine proto; + + SamplerType(Class expectedClass, SamplerMetadata.SamplerEngine proto) { + this.expectedClass = expectedClass; + this.proto = proto; + } + + public Class implClass() { + return this.expectedClass; + } + + public SamplerMetadata.SamplerEngine asProto() { + return this.proto; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java index b6cfbea6..7a791bcb 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java @@ -26,6 +26,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Supplier; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -64,7 +65,7 @@ public interface ThreadGrouper { * @param setting the config setting * @return the thread grouper */ - static ThreadGrouper parseConfigSetting(String setting) { + static Supplier parseConfigSetting(String setting) { switch (setting) { case "as-one": return AS_ONE; @@ -75,10 +76,15 @@ static ThreadGrouper parseConfigSetting(String setting) { } } + /** + * Supplier for {@link ByName} thread groupers. + */ + Supplier BY_NAME = ByName::new; + /** * Implementation of {@link ThreadGrouper} that just groups by thread name. */ - ThreadGrouper BY_NAME = new ThreadGrouper() { + class ByName implements ThreadGrouper { @Override public String getGroup(long threadId, String threadName) { return threadName; @@ -93,7 +99,12 @@ public String getLabel(String group) { public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME; } - }; + } + + /** + * Supplier for {@link ByPool} thread groupers. + */ + Supplier BY_POOL = ByPool::new; /** * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool @@ -102,8 +113,8 @@ public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { *

<p>The regex pattern used to match pools expects a digit at the end of the thread name, * separated from the pool name with any of one or more of ' ', '-', or '#'.</p>
*/ - ThreadGrouper BY_POOL = new ThreadGrouper() { - private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$"); + class ByPool implements ThreadGrouper { + private static final Pattern PATTERN = Pattern.compile("^(.*?)[-# ]+\\d+$"); // thread id -> group private final Map cache = new ConcurrentHashMap<>(); @@ -117,7 +128,7 @@ public String getGroup(long threadId, String threadName) { return cached; } - Matcher matcher = this.pattern.matcher(threadName); + Matcher matcher = PATTERN.matcher(threadName); if (!matcher.matches()) { return threadName; } @@ -141,13 +152,18 @@ public String getLabel(String group) { public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL; } - }; + } + + /** + * Supplier for {@link AsOne} thread groupers. + */ + Supplier AS_ONE = AsOne::new; /** * Implementation of {@link ThreadGrouper} which groups all threads as one, under * the name "All". */ - ThreadGrouper AS_ONE = new ThreadGrouper() { + class AsOne implements ThreadGrouper { private final Set seen = ConcurrentHashMap.newKeySet(); @Override @@ -165,6 +181,6 @@ public String getLabel(String group) { public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { return SamplerMetadata.DataAggregator.ThreadGrouper.AS_ONE; } - }; + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java index b9a80e04..484493a3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java @@ -33,7 +33,7 @@ public class AsyncDataAggregator extends AbstractDataAggregator { /** A describer for async-profiler stack trace elements. */ private static final StackTraceNode.Describer STACK_TRACE_DESCRIBER = (element, parent) -> - new StackTraceNode.Description(element.getClassName(), element.getMethodName(), element.getMethodDescription()); + new StackTraceNode.AsyncDescription(element.getClassName(), element.getMethodName(), element.getMethodDescription()); protected AsyncDataAggregator(ThreadGrouper threadGrouper) { super(threadGrouper); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java new file mode 100644 index 00000000..ef68c460 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncNodeExporter.java @@ -0,0 +1,63 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
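// ByPool (converted to a class above) groups pool threads by stripping a
// trailing separator-plus-digit run from the thread name, falling back to the
// full name when nothing matches. A quick demonstration of what its PATTERN
// accepts and rejects; the sample thread names are illustrative only:
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class ByPoolPatternDemo {
    private static final Pattern PATTERN = Pattern.compile("^(.*?)[-# ]+\\d+$");

    public static void main(String[] args) {
        for (String name : new String[]{"Worker-1", "Worker #2", "pool-3-thread-7", "Server thread"}) {
            Matcher matcher = PATTERN.matcher(name);
            // non-matching names (no trailing digits) keep the full thread name
            System.out.println(name + " -> " + (matcher.matches() ? matcher.group(1) : name));
        }
    }
}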
+ */ + +package me.lucko.spark.common.sampler.async; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.exporter.AbstractNodeExporter; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.Collection; + +/** + * Node exporter for the {@link AsyncSampler}. + */ +public class AsyncNodeExporter extends AbstractNodeExporter { + public AsyncNodeExporter(ProtoTimeEncoder timeEncoder) { + super(timeEncoder); + } + + @Override + protected SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable childrenRefs) { + SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() + .setClassName(stackTraceNode.getClassName()) + .setMethodName(stackTraceNode.getMethodName()); + + double[] times = stackTraceNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + String methodDescription = stackTraceNode.getMethodDescription(); + if (methodDescription != null) { + proto.setMethodDesc(methodDescription); + } + + proto.addAllChildrenRefs(childrenRefs); + + return proto.build(); + } + + @Override + protected Collection exportChildren(Collection children) { + return children; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java index 5bee56f0..84aaa954 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java @@ -23,9 +23,7 @@ import com.google.common.collect.ImmutableTable; import com.google.common.collect.Table; import com.google.common.io.ByteStreams; - import me.lucko.spark.common.SparkPlatform; - import one.profiler.AsyncProfiler; import one.profiler.Events; @@ -75,14 +73,10 @@ public static synchronized AsyncProfilerAccess getInstance(SparkPlatform platfor try { profiler = load(platform); - if (isEventSupported(profiler, ProfilingEvent.ALLOC, false)) { allocationProfilingEvent = ProfilingEvent.ALLOC; } - - if (isEventSupported(profiler, ProfilingEvent.CPU, false)) { - profilingEvent = ProfilingEvent.CPU; - } else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) { + if (isEventSupported(profiler, ProfilingEvent.WALL, true)) { profilingEvent = ProfilingEvent.WALL; } } catch (Exception e) { @@ -116,13 +110,19 @@ public boolean checkSupported(SparkPlatform platform) { if (this.setupException instanceof UnsupportedSystemException) { platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your os/arch (" + this.setupException.getMessage() + "), so the built-in Java engine will be used instead."); + } else if (this.setupException instanceof UnsupportedJvmException) { + platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your JVM (" + + this.setupException.getMessage() + "), so the built-in Java engine will be used instead."); } else if (this.setupException instanceof NativeLoadingException && this.setupException.getCause().getMessage().contains("libstdc++")) { platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine because libstdc++ is not installed."); platform.getPlugin().log(Level.WARNING, "Please see here for more information: 
https://spark.lucko.me/docs/misc/Using-async-profiler#install-libstdc"); } else { - platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + this.setupException.getMessage()); + String error = this.setupException.getMessage(); + if (this.setupException.getCause() != null) { + error += " (" + this.setupException.getCause().getMessage() + ")"; + } + platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + error); platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler"); - this.setupException.printStackTrace(); } } @@ -142,6 +142,12 @@ private static AsyncProfiler load(SparkPlatform platform) throws Exception { // check compatibility String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", ""); String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT); + String jvm = System.getProperty("java.vm.name"); + + // openj9 not supported by async-profiler at the moment + if (jvm.contains("OpenJ9")) { + throw new UnsupportedJvmException(jvm); + } if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) { arch = "amd64-musl"; @@ -161,7 +167,7 @@ private static AsyncProfiler load(SparkPlatform platform) throws Exception { } // extract the profiler binary from the spark jar file - String resource = "spark/" + libPath + "/libasyncProfiler.so"; + String resource = "spark-native/" + libPath + "/libasyncProfiler.so"; URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource); if (profilerResource == null) { throw new IllegalStateException("Could not find " + resource + " in spark jar file"); @@ -203,8 +209,7 @@ private static boolean isEventSupported(AsyncProfiler profiler, ProfilingEvent e return false; } - enum ProfilingEvent { - CPU(Events.CPU), + public enum ProfilingEvent { WALL(Events.WALL), ALLOC(Events.ALLOC); @@ -226,6 +231,12 @@ public UnsupportedSystemException(String os, String arch) { } } + private static final class UnsupportedJvmException extends UnsupportedOperationException { + public UnsupportedJvmException(String jvm) { + super(jvm); + } + } + private static final class NativeLoadingException extends RuntimeException { public NativeLoadingException(Throwable cause) { super("A runtime error occurred whilst loading the native library", cause); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java index 2fd304c1..39c4eb20 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -21,11 +21,9 @@ package me.lucko.spark.common.sampler.async; import com.google.common.collect.ImmutableList; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.async.jfr.JfrReader; - import one.profiler.AsyncProfiler; import java.io.IOException; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 961c3e99..994c03bc 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -21,11 +21,11 @@ package 
me.lucko.spark.common.sampler.async; import com.google.common.util.concurrent.ThreadFactoryBuilder; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.SamplerMode; import me.lucko.spark.common.sampler.SamplerSettings; +import me.lucko.spark.common.sampler.SamplerType; import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.SparkThreadFactory; @@ -210,6 +210,11 @@ public void attachSocket(ViewerSocket socket) { } } + @Override + public SamplerType getType() { + return SamplerType.ASYNC; + } + @Override public SamplerMode getMode() { return this.sampleCollector.getMode(); @@ -222,7 +227,7 @@ public SamplerData toProto(SparkPlatform platform, ExportProps exportProps) { proto.setChannelInfo(exportProps.channelInfo()); } writeMetadataToProto(proto, platform, exportProps.creator(), exportProps.comment(), this.dataAggregator); - writeDataToProto(proto, this.dataAggregator, exportProps.mergeMode().get(), exportProps.classSourceLookup().get()); + writeDataToProto(proto, this.dataAggregator, AsyncNodeExporter::new, exportProps.classSourceLookup().get(), platform::createClassFinder); return proto.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/SampleCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/SampleCollector.java index 6054b915..cd3f7503 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/SampleCollector.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/SampleCollector.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.sampler.async; import com.google.common.collect.ImmutableList; - import me.lucko.spark.common.sampler.SamplerMode; import me.lucko.spark.common.sampler.async.AsyncProfilerAccess.ProfilingEvent; import me.lucko.spark.common.sampler.async.jfr.JfrReader.AllocationSample; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java index c51ec052..5b6a4705 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java @@ -39,7 +39,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator { /** A describer for java.lang.StackTraceElement */ private static final StackTraceNode.Describer STACK_TRACE_DESCRIBER = (element, parent) -> { int parentLineNumber = parent == null ? 
StackTraceNode.NULL_LINE_NUMBER : parent.getLineNumber(); - return new StackTraceNode.Description(element.getClassName(), element.getMethodName(), element.getLineNumber(), parentLineNumber); + return new StackTraceNode.JavaDescription(element.getClassName(), element.getMethodName(), element.getLineNumber(), parentLineNumber); }; /** The worker pool for inserting stack nodes */ @@ -99,7 +99,7 @@ public List exportData() { return super.exportData(); } - private static boolean isSleeping(ThreadInfo thread) { + static boolean isSleeping(ThreadInfo thread) { if (thread.getThreadState() == Thread.State.WAITING || thread.getThreadState() == Thread.State.TIMED_WAITING) { return true; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java new file mode 100644 index 00000000..c1100860 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaNodeExporter.java @@ -0,0 +1,97 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.java; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.exporter.AbstractNodeExporter; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.util.MethodDisambiguator; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * Node exporter for the {@link JavaSampler}. 
+ */ +public class JavaNodeExporter extends AbstractNodeExporter { + private final MergeStrategy mergeStrategy; + private final MethodDisambiguator methodDisambiguator; + + public JavaNodeExporter(ProtoTimeEncoder timeEncoder, MergeStrategy mergeStrategy, MethodDisambiguator methodDisambiguator) { + super(timeEncoder); + this.mergeStrategy = mergeStrategy; + this.methodDisambiguator = methodDisambiguator; + } + + protected SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable childrenRefs) { + SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() + .setClassName(stackTraceNode.getClassName()) + .setMethodName(stackTraceNode.getMethodName()); + + double[] times = stackTraceNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + int lineNumber = stackTraceNode.getLineNumber(); + if (lineNumber >= 0) { + proto.setLineNumber(lineNumber); + } + + if (this.mergeStrategy.separateParentCalls()) { + int parentLineNumber = stackTraceNode.getParentLineNumber(); + if (parentLineNumber >= 0) { + proto.setParentLineNumber(parentLineNumber); + } + } + + this.methodDisambiguator.disambiguate(stackTraceNode) + .map(MethodDisambiguator.MethodDescription::getDescription) + .ifPresent(proto::setMethodDesc); + + proto.addAllChildrenRefs(childrenRefs); + + return proto.build(); + } + + @Override + protected Collection exportChildren(Collection children) { + if (children.isEmpty()) { + return children; + } + + List list = new ArrayList<>(children.size()); + + outer: + for (StackTraceNode child : children) { + for (StackTraceNode other : list) { + if (this.mergeStrategy.shouldMerge(this.methodDisambiguator, other, child)) { + other.merge(child); + continue outer; + } + } + list.add(child); + } + return list; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index e29619b5..050c5b4f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -21,14 +21,15 @@ package me.lucko.spark.common.sampler.java; import com.google.common.util.concurrent.ThreadFactoryBuilder; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.SamplerMode; import me.lucko.spark.common.sampler.SamplerSettings; +import me.lucko.spark.common.sampler.SamplerType; import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.common.ws.ViewerSocket; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; @@ -193,11 +194,20 @@ public SamplerData toProto(SparkPlatform platform, ExportProps exportProps) { if (exportProps.channelInfo() != null) { proto.setChannelInfo(exportProps.channelInfo()); } + writeMetadataToProto(proto, platform, exportProps.creator(), exportProps.comment(), this.dataAggregator); - writeDataToProto(proto, this.dataAggregator, exportProps.mergeMode().get(), exportProps.classSourceLookup().get()); + + MethodDisambiguator methodDisambiguator = new MethodDisambiguator(platform.createClassFinder()); + writeDataToProto(proto, 
this.dataAggregator, timeEncoder -> new JavaNodeExporter(timeEncoder, exportProps.mergeStrategy(), methodDisambiguator), exportProps.classSourceLookup().get(), platform::createClassFinder); + return proto.build(); } + @Override + public SamplerType getType() { + return SamplerType.JAVA; + } + @Override public SamplerMode getMode() { return SamplerMode.EXECUTION; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java similarity index 64% rename from spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java rename to spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java index 18a0ed31..eac1c6fc 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/MergeMode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/MergeStrategy.java @@ -18,37 +18,27 @@ * along with this program. If not, see . */ -package me.lucko.spark.common.sampler.node; +package me.lucko.spark.common.sampler.java; +import me.lucko.spark.common.sampler.node.StackTraceNode; import me.lucko.spark.common.util.MethodDisambiguator; import java.util.Objects; /** - * Function to determine if {@link StackTraceNode}s should be merged. + * Strategy used to determine if {@link StackTraceNode}s should be merged. */ -public final class MergeMode { +public enum MergeStrategy { - public static MergeMode sameMethod(MethodDisambiguator methodDisambiguator) { - return new MergeMode(methodDisambiguator, false); - } - - public static MergeMode separateParentCalls(MethodDisambiguator methodDisambiguator) { - return new MergeMode(methodDisambiguator, true); - } + SAME_METHOD(false), + SEPARATE_PARENT_CALLS(true); - private final MethodDisambiguator methodDisambiguator; private final boolean separateParentCalls; - MergeMode(MethodDisambiguator methodDisambiguator, boolean separateParentCalls) { - this.methodDisambiguator = methodDisambiguator; + MergeStrategy(boolean separateParentCalls) { this.separateParentCalls = separateParentCalls; } - public MethodDisambiguator getMethodDisambiguator() { - return this.methodDisambiguator; - } - public boolean separateParentCalls() { return this.separateParentCalls; } @@ -56,11 +46,12 @@ public boolean separateParentCalls() { /** * Test if two stack trace nodes should be considered the same and merged. * + * @param disambiguator the method disambiguator * @param n1 the first node * @param n2 the second node * @return if the nodes should be merged */ - public boolean shouldMerge(StackTraceNode n1, StackTraceNode n2) { + public boolean shouldMerge(MethodDisambiguator disambiguator, StackTraceNode n1, StackTraceNode n2) { // are the class names the same? if (!n1.getClassName().equals(n2.getClassName())) { return false; @@ -77,8 +68,8 @@ public boolean shouldMerge(StackTraceNode n1, StackTraceNode n2) { } // are the method descriptions the same? (is it the same method?) 
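Because MergeStrategy is now a stateless enum that receives the MethodDisambiguator as a parameter, the merge decision is easy to demonstrate in isolation. The following standalone sketch mirrors only the line-number part of the rule, using a simplified Frame stand-in; it is not the real StackTraceNode API, and the descriptor comparison is omitted.

public class MergeStrategyExample {
    // simplified stand-in for StackTraceNode
    static final class Frame {
        final String className;
        final String methodName;
        final int parentLineNumber;

        Frame(String className, String methodName, int parentLineNumber) {
            this.className = className;
            this.methodName = methodName;
            this.parentLineNumber = parentLineNumber;
        }
    }

    enum MergeStrategy {
        SAME_METHOD(false),
        SEPARATE_PARENT_CALLS(true);

        private final boolean separateParentCalls;

        MergeStrategy(boolean separateParentCalls) {
            this.separateParentCalls = separateParentCalls;
        }

        boolean shouldMerge(Frame n1, Frame n2) {
            if (!n1.className.equals(n2.className) || !n1.methodName.equals(n2.methodName)) {
                return false;
            }
            // SEPARATE_PARENT_CALLS keeps two otherwise-identical frames apart
            // when they were invoked from different lines of the calling method
            return !this.separateParentCalls || n1.parentLineNumber == n2.parentLineNumber;
        }
    }

    public static void main(String[] args) {
        Frame a = new Frame("a.B", "tick", 10);
        Frame b = new Frame("a.B", "tick", 42);
        System.out.println(MergeStrategy.SAME_METHOD.shouldMerge(a, b));           // true
        System.out.println(MergeStrategy.SEPARATE_PARENT_CALLS.shouldMerge(a, b)); // false
    }
}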
- String desc1 = this.methodDisambiguator.disambiguate(n1).map(MethodDisambiguator.MethodDescription::getDesc).orElse(null); - String desc2 = this.methodDisambiguator.disambiguate(n2).map(MethodDisambiguator.MethodDescription::getDesc).orElse(null); + String desc1 = disambiguator.disambiguate(n1).map(MethodDisambiguator.MethodDescription::getDescription).orElse(null); + String desc2 = disambiguator.disambiguate(n2).map(MethodDisambiguator.MethodDescription::getDescription).orElse(null); if (desc1 == null && desc2 == null) { return true; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java index 08cb7193..f24af3f3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java @@ -110,7 +110,7 @@ private void pushCurrentTick(Executor executor) { } // approximate how long the tick lasted - int tickLengthMicros = currentData.getList().size() * this.interval; + int tickLengthMicros = currentData.sizeWithoutTrailingSleeping() * this.interval; // don't push data below the threshold if (tickLengthMicros < this.tickLengthThreshold) { @@ -151,6 +151,16 @@ public List getList() { return this.list; } + public int sizeWithoutTrailingSleeping() { + // find the last index at which the thread wasn't sleeping + for (int i = this.list.size() - 1; i >= 0; i--) { + if (!isSleeping(this.list.get(i))) { + return i + 1; // add one to go from index to size + } + } + return 0; + } + public void addData(ThreadInfo data) { this.list.add(data); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index 163365cb..d3b77b41 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -22,10 +22,7 @@ import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -86,7 +83,7 @@ public boolean removeTimeWindows(IntPredicate predicate) { * * @return the total times */ - protected double[] encodeTimesForProto(ProtoTimeEncoder encoder) { + public double[] encodeTimesForProto(ProtoTimeEncoder encoder) { return encoder.encode(this.times); } @@ -107,35 +104,11 @@ protected StackTraceNode resolveChild(StackTraceNode.Description description) { * * @param other the other node */ - protected void merge(AbstractNode other) { + public void merge(AbstractNode other) { other.times.forEach((key, value) -> getTimeAccumulator(key).add(value.longValue())); for (Map.Entry child : other.children.entrySet()) { resolveChild(child.getKey()).merge(child.getValue()); } } - protected List exportChildren(MergeMode mergeMode) { - if (this.children.isEmpty()) { - return Collections.emptyList(); - } - - List list = new ArrayList<>(this.children.size()); - - outer: - for (StackTraceNode child : this.children.values()) { - // attempt to find an existing node we can merge into - for (StackTraceNode other : list) { - if (mergeMode.shouldMerge(other, child)) { - other.merge(child); - continue outer; - } - } - - // just add - list.add(child); - } - - 
return list; - } - } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java index c0dcc5bd..27cfa540 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java @@ -20,10 +20,6 @@ package me.lucko.spark.common.sampler.node; -import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import me.lucko.spark.common.util.MethodDisambiguator; -import me.lucko.spark.proto.SparkSamplerProtos; - import org.checkerframework.checker.nullness.qual.Nullable; import java.util.Objects; @@ -46,58 +42,33 @@ public StackTraceNode(Description description) { } public String getClassName() { - return this.description.className; + return this.description.className(); } public String getMethodName() { - return this.description.methodName; + return this.description.methodName(); } public String getMethodDescription() { - return this.description.methodDescription; + return this.description instanceof AsyncDescription + ? ((AsyncDescription) this.description).methodDescription() + : null; } public int getLineNumber() { - return this.description.lineNumber; + return this.description instanceof JavaDescription + ? ((JavaDescription) this.description).lineNumber() + : NULL_LINE_NUMBER; } public int getParentLineNumber() { - return this.description.parentLineNumber; - } - - public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder, Iterable childrenRefs) { - SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() - .setClassName(this.description.className) - .setMethodName(this.description.methodName); - - double[] times = encodeTimesForProto(timeEncoder); - for (double time : times) { - proto.addTimes(time); - } - - if (this.description.lineNumber >= 0) { - proto.setLineNumber(this.description.lineNumber); - } - - if (mergeMode.separateParentCalls() && this.description.parentLineNumber >= 0) { - proto.setParentLineNumber(this.description.parentLineNumber); - } - - if (this.description.methodDescription != null) { - proto.setMethodDesc(this.description.methodDescription); - } else { - mergeMode.getMethodDisambiguator().disambiguate(this) - .map(MethodDisambiguator.MethodDescription::getDesc) - .ifPresent(proto::setMethodDesc); - } - - proto.addAllChildrenRefs(childrenRefs); - - return proto.build(); + return this.description instanceof JavaDescription + ? ((JavaDescription) this.description).parentLineNumber() + : NULL_LINE_NUMBER; } /** - * Function to construct a {@link StackTraceNode.Description} from a stack trace element + * Function to construct a {@link Description} from a stack trace element * of type {@code T}. * * @param the stack trace element type, e.g. {@link java.lang.StackTraceElement} @@ -115,53 +86,101 @@ public interface Describer { Description describe(T element, @Nullable T parent); } - /** - * Encapsulates the attributes of a {@link StackTraceNode}. 
- */ - public static final class Description { + public interface Description { + String className(); + + String methodName(); + } + + public static final class AsyncDescription implements Description { private final String className; private final String methodName; - - // async-profiler private final String methodDescription; - // Java + private final int hash; + + public AsyncDescription(String className, String methodName, String methodDescription) { + this.className = className; + this.methodName = methodName; + this.methodDescription = methodDescription; + this.hash = Objects.hash(this.className, this.methodName, this.methodDescription); + } + + @Override + public String className() { + return this.className; + } + + @Override + public String methodName() { + return this.methodName; + } + + public String methodDescription() { + return this.methodDescription; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AsyncDescription description = (AsyncDescription) o; + return this.hash == description.hash && + this.className.equals(description.className) && + this.methodName.equals(description.methodName) && + Objects.equals(this.methodDescription, description.methodDescription); + } + + @Override + public int hashCode() { + return this.hash; + } + } + + public static final class JavaDescription implements Description { + private final String className; + private final String methodName; private final int lineNumber; private final int parentLineNumber; private final int hash; - // Constructor used by the Java sampler - public Description(String className, String methodName, int lineNumber, int parentLineNumber) { + public JavaDescription(String className, String methodName, int lineNumber, int parentLineNumber) { this.className = className; this.methodName = methodName; - this.methodDescription = null; this.lineNumber = lineNumber; this.parentLineNumber = parentLineNumber; this.hash = Objects.hash(this.className, this.methodName, this.lineNumber, this.parentLineNumber); } - // Constructor used by the async-profiler sampler - public Description(String className, String methodName, String methodDescription) { - this.className = className; - this.methodName = methodName; - this.methodDescription = methodDescription; - this.lineNumber = StackTraceNode.NULL_LINE_NUMBER; - this.parentLineNumber = StackTraceNode.NULL_LINE_NUMBER; - this.hash = Objects.hash(this.className, this.methodName, this.methodDescription); + @Override + public String className() { + return this.className; + } + + @Override + public String methodName() { + return this.methodName; + } + + public int lineNumber() { + return this.lineNumber; + } + + public int parentLineNumber() { + return this.parentLineNumber; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - Description description = (Description) o; + JavaDescription description = (JavaDescription) o; return this.hash == description.hash && this.lineNumber == description.lineNumber && this.parentLineNumber == description.parentLineNumber && this.className.equals(description.className) && - this.methodName.equals(description.methodName) && - Objects.equals(this.methodDescription, description.methodDescription); + this.methodName.equals(description.methodName); } @Override diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index 37ff359d..f934e535 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -20,16 +20,9 @@ package me.lucko.spark.common.sampler.node; -import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; -import me.lucko.spark.common.util.IndexedListBuilder; -import me.lucko.spark.proto.SparkSamplerProtos; - import java.util.ArrayDeque; import java.util.Collection; -import java.util.Deque; import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; import java.util.Queue; import java.util.function.IntPredicate; @@ -134,92 +127,4 @@ public boolean removeTimeWindowsRecursively(IntPredicate predicate) { return getTimeWindows().isEmpty(); } - public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) { - SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() - .setName(getThreadLabel()); - - double[] times = encodeTimesForProto(timeEncoder); - for (double time : times) { - proto.addTimes(time); - } - - // When converting to a proto, we change the data structure from a recursive tree to an array. - // Effectively, instead of: - // - // { - // data: 'one', - // children: [ - // { - // data: 'two', - // children: [{ data: 'four' }] - // }, - // { data: 'three' } - // ] - // } - // - // we transmit: - // - // [ - // { data: 'one', children: [1, 2] }, - // { data: 'two', children: [3] } - // { data: 'three', children: [] } - // { data: 'four', children: [] } - // ] - // - - // the flattened array of nodes - IndexedListBuilder nodesArray = new IndexedListBuilder<>(); - - // Perform a depth-first post order traversal of the tree - Deque stack = new ArrayDeque<>(); - - // push the thread node's children to the stack - List childrenRefs = new LinkedList<>(); - for (StackTraceNode child : exportChildren(mergeMode)) { - stack.push(new Node(child, childrenRefs)); - } - - Node node; - while (!stack.isEmpty()) { - node = stack.peek(); - - // on the first visit, just push this node's children and leave it on the stack - if (node.firstVisit) { - for (StackTraceNode child : node.stackTraceNode.exportChildren(mergeMode)) { - stack.push(new Node(child, node.childrenRefs)); - } - node.firstVisit = false; - continue; - } - - // convert StackTraceNode to a proto - // - at this stage, we have already visited this node's children - // - the refs for each child are stored in node.childrenRefs - SparkSamplerProtos.StackTraceNode childProto = node.stackTraceNode.toProto(mergeMode, timeEncoder, node.childrenRefs); - - // add the child proto to the nodes array, and record the ref in the parent - int childIndex = nodesArray.add(childProto); - node.parentChildrenRefs.add(childIndex); - - // pop from the stack - stack.pop(); - } - - proto.addAllChildrenRefs(childrenRefs); - proto.addAllChildren(nodesArray.build()); - - return proto.build(); - } - - private static final class Node { - private final StackTraceNode stackTraceNode; - private boolean firstVisit = true; - private final List childrenRefs = new LinkedList<>(); - private final List parentChildrenRefs; - - private Node(StackTraceNode node, List parentChildrenRefs) { - this.stackTraceNode = node; - this.parentChildrenRefs = parentChildrenRefs; - } - } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java new file mode 100644 index 00000000..bc548572 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/AbstractNodeExporter.java @@ -0,0 +1,136 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.node.exporter; + +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.util.IndexedListBuilder; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.util.ArrayDeque; +import java.util.Collection; +import java.util.Deque; +import java.util.LinkedList; +import java.util.List; + +public abstract class AbstractNodeExporter implements NodeExporter { + protected final ProtoTimeEncoder timeEncoder; + + protected AbstractNodeExporter(ProtoTimeEncoder timeEncoder) { + this.timeEncoder = timeEncoder; + } + + @Override + public SparkSamplerProtos.ThreadNode export(ThreadNode threadNode) { + SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() + .setName(threadNode.getThreadLabel()); + + double[] times = threadNode.encodeTimesForProto(this.timeEncoder); + for (double time : times) { + proto.addTimes(time); + } + + // When converting to a proto, we change the data structure from a recursive tree to an array. 
+ // Effectively, instead of: + // + // { + // data: 'one', + // children: [ + // { + // data: 'two', + // children: [{ data: 'four' }] + // }, + // { data: 'three' } + // ] + // } + // + // we transmit: + // + // [ + // { data: 'one', children: [1, 2] }, + // { data: 'two', children: [3] } + // { data: 'three', children: [] } + // { data: 'four', children: [] } + // ] + // + + // the flattened array of nodes + IndexedListBuilder nodesArray = new IndexedListBuilder<>(); + + // Perform a depth-first post order traversal of the tree + Deque stack = new ArrayDeque<>(); + + // push the thread node's children to the stack + List childrenRefs = new LinkedList<>(); + for (StackTraceNode child : exportChildren(threadNode.getChildren())) { + stack.push(new Node(child, childrenRefs)); + } + + Node node; + while (!stack.isEmpty()) { + node = stack.peek(); + + // on the first visit, just push this node's children and leave it on the stack + if (node.firstVisit) { + for (StackTraceNode child : exportChildren(node.stackTraceNode.getChildren())) { + stack.push(new Node(child, node.childrenRefs)); + } + node.firstVisit = false; + continue; + } + + // convert StackTraceNode to a proto + // - at this stage, we have already visited this node's children + // - the refs for each child are stored in node.childrenRefs + SparkSamplerProtos.StackTraceNode childProto = this.export(node.stackTraceNode, node.childrenRefs); + + // add the child proto to the nodes array, and record the ref in the parent + int childIndex = nodesArray.add(childProto); + node.parentChildrenRefs.add(childIndex); + + // pop from the stack + stack.pop(); + } + + proto.addAllChildrenRefs(childrenRefs); + proto.addAllChildren(nodesArray.build()); + + return proto.build(); + } + + protected abstract SparkSamplerProtos.StackTraceNode export(StackTraceNode stackTraceNode, Iterable childrenRefs); + + protected abstract Collection exportChildren(Collection children); + + private static final class Node { + private final StackTraceNode stackTraceNode; + private boolean firstVisit = true; + private final List childrenRefs = new LinkedList<>(); + private final List parentChildrenRefs; + + private Node(StackTraceNode node, List parentChildrenRefs) { + this.stackTraceNode = node; + this.parentChildrenRefs = parentChildrenRefs; + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java new file mode 100644 index 00000000..b599fc01 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/exporter/NodeExporter.java @@ -0,0 +1,39 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
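The tree-to-array transformation described in the comment above can be shown end to end with a toy node type. This sketch is a simplification: it lays nodes out breadth-first for brevity, whereas the exporter uses an iterative depth-first traversal, but the childrenRefs indexing works the same way. The Node class is hypothetical.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Queue;

public class FlattenExample {
    static final class Node {
        final String data;
        final List<Node> children;
        final List<Integer> childrenRefs = new ArrayList<>();

        Node(String data, Node... children) {
            this.data = data;
            this.children = Arrays.asList(children);
        }
    }

    public static void main(String[] args) {
        Node tree = new Node("one", new Node("two", new Node("four")), new Node("three"));

        // first pass: flatten the tree into an array
        List<Node> flat = new ArrayList<>();
        Queue<Node> queue = new ArrayDeque<>();
        queue.add(tree);
        while (!queue.isEmpty()) {
            Node node = queue.poll();
            flat.add(node);
            queue.addAll(node.children);
        }

        // second pass: record each child's index into the flat array
        for (Node node : flat) {
            for (Node child : node.children) {
                node.childrenRefs.add(flat.indexOf(child));
            }
        }

        for (Node node : flat) {
            System.out.println(node.data + " -> children " + node.childrenRefs);
        }
        // prints:
        // one -> children [1, 2]
        // two -> children [3]
        // three -> children []
        // four -> children []
    }
}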
+ */ + +package me.lucko.spark.common.sampler.node.exporter; + +import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.proto.SparkSamplerProtos; + +/** + * Exports a {@link ThreadNode} to a protobuf message. + */ +public interface NodeExporter { + + /** + * Exports a {@link ThreadNode} to a protobuf message. + * + * @param threadNode the thread node + * @return the exported protobuf message + */ + SparkSamplerProtos.ThreadNode export(ThreadNode threadNode); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java index ab63c003..a3b4f024 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java @@ -23,8 +23,7 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.node.StackTraceNode; import me.lucko.spark.common.sampler.node.ThreadNode; -import me.lucko.spark.common.util.ClassFinder; - +import me.lucko.spark.common.util.classfinder.ClassFinder; import org.checkerframework.checker.nullness.qual.Nullable; import java.io.IOException; @@ -43,6 +42,7 @@ import java.util.Objects; import java.util.Queue; import java.util.function.Function; +import java.util.function.Supplier; import java.util.stream.Collectors; /** @@ -204,11 +204,11 @@ interface Visitor { Map getLineSourceMapping(); } - static Visitor createVisitor(ClassSourceLookup lookup) { + static Visitor createVisitor(ClassSourceLookup lookup, Supplier classFinderSupplier) { if (lookup == ClassSourceLookup.NO_OP) { return NoOpVisitor.INSTANCE; // don't bother! } - return new VisitorImpl(lookup); + return new VisitorImpl(lookup, classFinderSupplier.get()); } enum NoOpVisitor implements Visitor { @@ -255,14 +255,15 @@ public Map getLineSourceMapping() { */ class VisitorImpl implements Visitor { private final ClassSourceLookup lookup; - private final ClassFinder classFinder = new ClassFinder(); + private final ClassFinder classFinder; private final SourcesMap classSources = new SourcesMap<>(Function.identity()); private final SourcesMap methodSources = new SourcesMap<>(MethodCall::toString); private final SourcesMap lineSources = new SourcesMap<>(MethodCallByLine::toString); - VisitorImpl(ClassSourceLookup lookup) { + VisitorImpl(ClassSourceLookup lookup, ClassFinder classFinder) { this.lookup = lookup; + this.classFinder = classFinder; } @Override @@ -288,7 +289,7 @@ private void visitStackNode(StackTraceNode node) { if (node.getMethodDescription() != null) { MethodCall methodCall = new MethodCall(node.getClassName(), node.getMethodName(), node.getMethodDescription()); this.methodSources.computeIfAbsent(methodCall, this.lookup::identify); - } else { + } else if (node.getLineNumber() != StackTraceNode.NULL_LINE_NUMBER) { MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber()); this.lineSources.computeIfAbsent(methodCall, this.lookup::identify); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java index 0808d660..d023a68e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java @@ -21,8 +21,7 @@ package 
me.lucko.spark.common.sampler.source; import com.google.common.collect.ImmutableList; - -import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; +import me.lucko.spark.proto.SparkProtos.PluginOrModMetadata; import java.util.Collection; import java.util.List; @@ -34,15 +33,16 @@ */ public class SourceMetadata { - public static List gather(Collection sources, Function nameFunction, Function versionFunction, Function authorFunction) { + public static List gather(Collection sources, Function name, Function version, Function author, Function description) { ImmutableList.Builder builder = ImmutableList.builder(); for (T source : sources) { - String name = nameFunction.apply(source); - String version = versionFunction.apply(source); - String author = authorFunction.apply(source); - - SourceMetadata metadata = new SourceMetadata(name, version, author); + SourceMetadata metadata = new SourceMetadata( + name.apply(source), + version.apply(source), + author.apply(source), + description.apply(source) + ); builder.add(metadata); } @@ -52,11 +52,13 @@ public static List gather(Collection sources, Function index in the keys array */ private final Map keysToIndex; - public ProtoTimeEncoder(LongToDoubleFunction valueTransformer, List sourceData) { + @VisibleForTesting + ProtoTimeEncoder(LongToDoubleFunction valueTransformer, IntStream keys) { this.valueTransformer = valueTransformer; - - // get an array of all keys that show up in the source data - this.keys = sourceData.stream() - .map(n -> n.getTimeWindows().stream().mapToInt(i -> i)) - .reduce(IntStream.empty(), IntStream::concat) - .distinct() - .sorted() - .toArray(); + this.keys = keys.distinct().sorted().toArray(); // construct a reverse index lookup this.keysToIndex = new HashMap<>(this.keys.length); @@ -61,6 +55,13 @@ public ProtoTimeEncoder(LongToDoubleFunction valueTransformer, List } } + public ProtoTimeEncoder(LongToDoubleFunction valueTransformer, List sourceData) { + this(valueTransformer, sourceData.stream() + .map(n -> n.getTimeWindows().stream().mapToInt(i -> i)) + .reduce(IntStream.empty(), IntStream::concat) + ); + } + /** * Gets an array of the keys that could be encoded by this encoder. * @@ -71,7 +72,7 @@ public int[] getKeys() { } /** - * Encode a {@link Dictionary} (map) of times/durations into a double array. + * Encode a map of times/durations into a double array. * * @param times a dictionary of times (unix-time millis -> duration in microseconds) * @return the times encoded as a double array diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java b/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java new file mode 100644 index 00000000..23120ea2 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java @@ -0,0 +1,47 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
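As an illustration of the widened gather(...) signature above, which now also extracts a description for each source, here is a hedged usage sketch. The PluginStub type and its accessors are invented for the example; only the shape of the gather call comes from the patch.

import me.lucko.spark.common.sampler.source.SourceMetadata;

import java.util.Arrays;
import java.util.List;

public class GatherExample {
    static final class PluginStub {
        String name() { return "spark"; }
        String version() { return "1.10-SNAPSHOT"; }
        String author() { return "lucko"; }
        String description() { return "a performance profiler"; }
    }

    public static void main(String[] args) {
        List<SourceMetadata> metadata = SourceMetadata.gather(
                Arrays.asList(new PluginStub()),
                PluginStub::name,
                PluginStub::version,
                PluginStub::author,
                PluginStub::description); // the description extractor is the new fourth function
        System.out.println(metadata.size()); // 1
    }
}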
+ */ + +package me.lucko.spark.common.util; + +import org.jetbrains.annotations.VisibleForTesting; + +public enum JavaVersion { + ; + + private static final int JAVA_VERSION; + static { + JAVA_VERSION = parseJavaVersion(System.getProperty("java.version")); + } + + @VisibleForTesting + static int parseJavaVersion(String version) { + if (version.startsWith("1.")) { + // Java 8 and below + return Integer.parseInt(version.substring(2, 3)); + } else { + // Java 9 and above + return Integer.parseInt(version.split("\\.")[0]); + } + } + + public static int getJavaVersion() { + return JAVA_VERSION; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java b/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java index 2c495401..47a33955 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java @@ -25,5 +25,6 @@ public enum MediaTypes { public static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; public static final String SPARK_HEAP_MEDIA_TYPE = "application/x-spark-heap"; + public static final String SPARK_HEALTH_MEDIA_TYPE = "application/x-spark-health"; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java b/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java index c03e7cb0..8553abbf 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java @@ -23,9 +23,8 @@ import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ListMultimap; - import me.lucko.spark.common.sampler.node.StackTraceNode; - +import me.lucko.spark.common.util.classfinder.ClassFinder; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Label; @@ -45,8 +44,13 @@ * to a method (method name + method description). 
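The version parsing in JavaVersion above handles both the legacy "1.x" scheme and the modern scheme. A quick standalone check, with sample version strings that are illustrative only:

public class JavaVersionExample {
    static int parseJavaVersion(String version) {
        if (version.startsWith("1.")) {
            // legacy scheme, e.g. "1.8.0_392" -> 8
            return Integer.parseInt(version.substring(2, 3));
        }
        // modern scheme, e.g. "17.0.2" -> 17, "21" -> 21
        return Integer.parseInt(version.split("\\.")[0]);
    }

    public static void main(String[] args) {
        System.out.println(parseJavaVersion("1.8.0_392")); // 8
        System.out.println(parseJavaVersion("17.0.2"));    // 17
        System.out.println(parseJavaVersion("21"));        // 21
    }
}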
*/ public final class MethodDisambiguator { - private final Map cache = new ConcurrentHashMap<>(); - private final ClassFinder classFinder = new ClassFinder(); + private final ClassFinder classFinder; + private final Map cache; + + public MethodDisambiguator(ClassFinder classFinder) { + this.classFinder = classFinder; + this.cache = new ConcurrentHashMap<>(); + } public Optional disambiguate(StackTraceNode element) { String desc = element.getMethodDescription(); @@ -81,6 +85,29 @@ public Optional disambiguate(String className, String methodN } } + private ComputedClass compute(String className) throws IOException { + final ImmutableListMultimap.Builder descriptionsByName = ImmutableListMultimap.builder(); + final Map descriptionsByLine = new HashMap<>(); + + ClassReader classReader = getClassReader(className); + classReader.accept(new ClassVisitor(Opcodes.ASM7) { + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + MethodDescription description = new MethodDescription(name, descriptor); + descriptionsByName.put(name, description); + + return new MethodVisitor(Opcodes.ASM7) { + @Override + public void visitLineNumber(int line, Label start) { + descriptionsByLine.put(line, description); + } + }; + } + }, Opcodes.ASM7); + + return new ComputedClass(descriptionsByName.build(), ImmutableMap.copyOf(descriptionsByLine)); + } + private ClassReader getClassReader(String className) throws IOException { String resource = className.replace('.', '/') + ".class"; @@ -102,28 +129,6 @@ private ClassReader getClassReader(String className) throws IOException { throw new IOException("Unable to get resource: " + className); } - private ComputedClass compute(String className) throws IOException { - ImmutableListMultimap.Builder descriptionsByName = ImmutableListMultimap.builder(); - Map descriptionsByLine = new HashMap<>(); - - getClassReader(className).accept(new ClassVisitor(Opcodes.ASM7) { - @Override - public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { - MethodDescription description = new MethodDescription(name, descriptor); - descriptionsByName.put(name, description); - - return new MethodVisitor(Opcodes.ASM7) { - @Override - public void visitLineNumber(int line, Label start) { - descriptionsByLine.put(line, description); - } - }; - } - }, Opcodes.ASM7); - - return new ComputedClass(descriptionsByName.build(), ImmutableMap.copyOf(descriptionsByLine)); - } - private static final class ComputedClass { private static final ComputedClass EMPTY = new ComputedClass(ImmutableListMultimap.of(), ImmutableMap.of()); @@ -138,24 +143,24 @@ private ComputedClass(ListMultimap descriptionsByName public static final class MethodDescription { private final String name; - private final String desc; + private final String description; - private MethodDescription(String name, String desc) { + private MethodDescription(String name, String description) { this.name = name; - this.desc = desc; + this.description = description; } public String getName() { return this.name; } - public String getDesc() { - return this.desc; + public String getDescription() { + return this.description; } @Override public String toString() { - return this.name + this.desc; + return this.name + this.description; } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java index be5bbc28..b4acc7be 
100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java @@ -23,9 +23,9 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.tick.TickStatistics; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.TextComponent; +import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; import java.util.Locale; @@ -187,5 +187,13 @@ public static String resolveFormattingCode(SparkPlatform platform, String placeh } return LegacyComponentSerializer.legacySection().serialize(result); } + + public static String resolveComponentJson(SparkPlatform platform, String placeholder) { + TextComponent result = resolveComponent(platform, placeholder); + if (result == null) { + return null; + } + return GsonComponentSerializer.gson().serialize(result); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkStaticLogger.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkStaticLogger.java new file mode 100644 index 00000000..eb5f3163 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkStaticLogger.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util; + +import java.util.logging.Level; + +/** + * Special logger for use by classes that don't easily have access to a + * {@link me.lucko.spark.common.SparkPlatform} instance. + * + *
<p>This avoids warnings on platforms like Paper that get upset if plugins use + * {@link System#out} or {@link System#err}.</p>
+ */ +public enum SparkStaticLogger { + ; + + private static Logger logger = null; + + public synchronized static void setLogger(Logger logger) { + if (SparkStaticLogger.logger == null) { + SparkStaticLogger.logger = logger; + } + } + + public static void log(Level level, String msg) { + Logger logger = SparkStaticLogger.logger; + if (logger == null) { + if (level.intValue() >= 1000) { + System.err.println(msg); + } else { + System.out.println(msg); + } + return; + } + + logger.log(level, msg); + } + + public interface Logger { + void log(Level level, String msg); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java index 42dca12a..1d6971be 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java @@ -22,11 +22,12 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; +import java.util.logging.Level; public class SparkThreadFactory implements ThreadFactory { public static final Thread.UncaughtExceptionHandler EXCEPTION_HANDLER = (t, e) -> { - System.err.println("Uncaught exception thrown by thread " + t.getName()); + SparkStaticLogger.log(Level.SEVERE, "Uncaught exception thrown by thread " + t.getName()); e.printStackTrace(); }; diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java index b488f505..e7b8cca2 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java @@ -21,9 +21,7 @@ package me.lucko.spark.common.util; import com.google.common.base.Strings; - import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.TextComponent; import net.kyori.adventure.text.format.TextColor; diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java index 91a474cd..01dfccf5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java @@ -25,8 +25,11 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.FileSystems; +import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; @@ -58,16 +61,21 @@ public final class TemporaryFiles { private final Set files = Collections.synchronizedSet(new HashSet<>()); public TemporaryFiles(Path tmpDirectory) { - this.tmpDirectory = tmpDirectory; + boolean useOsTmpDir = Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false")); + if (useOsTmpDir) { + this.tmpDirectory = null; + } else { + this.tmpDirectory = init(tmpDirectory); + } } public Path create(String prefix, String suffix) throws IOException { Path file; - if (ensureDirectoryIsReady()) { + if (this.tmpDirectory == null) { + file = Files.createTempFile(prefix, suffix); + } else { String name = 
prefix + Long.toHexString(System.nanoTime()) + suffix; file = Files.createFile(this.tmpDirectory.resolve(name), OWNER_ONLY_FILE_PERMISSIONS); - } else { - file = Files.createTempFile(prefix, suffix); } return register(file); } @@ -92,19 +100,33 @@ public void deleteTemporaryFiles() { } } - private boolean ensureDirectoryIsReady() { - if (Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false"))) { - return false; - } - - if (Files.isDirectory(this.tmpDirectory)) { - return true; - } - + private static Path init(Path tmpDirectory) { try { - Files.createDirectories(this.tmpDirectory); - - Files.write(this.tmpDirectory.resolve("about.txt"), ImmutableList.of( + Files.createDirectories(tmpDirectory); + Path readmePath = tmpDirectory.resolve("about.txt"); + + Files.walkFileTree( + tmpDirectory, + new SimpleFileVisitor() { + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + if (!dir.equals(tmpDirectory)) { + Files.delete(dir); + } + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + if (!file.equals(readmePath)) { + Files.delete(file); + } + return FileVisitResult.CONTINUE; + } + } + ); + + Files.write(readmePath, ImmutableList.of( "# What is this directory?", "", "* In order to perform certain functions, spark sometimes needs to write temporary data to the disk. ", @@ -116,11 +138,10 @@ private boolean ensureDirectoryIsReady() { "", "tl;dr: spark uses this folder to store some temporary data." ), StandardCharsets.UTF_8); - - return true; } catch (IOException e) { - return false; + // ignore } + return tmpDirectory; } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/ClassFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/ClassFinder.java new file mode 100644 index 00000000..1ee75c66 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/ClassFinder.java @@ -0,0 +1,46 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util.classfinder; + +import com.google.common.collect.ImmutableList; +import org.checkerframework.checker.nullness.qual.Nullable; + +public interface ClassFinder { + + /** + * Creates a ClassFinder that combines the results of multiple other finders. + * + * @param finders the other class finders + * @return the combined class finder + */ + static ClassFinder combining(ClassFinder... finders) { + return new CombinedClassFinder(ImmutableList.copyOf(finders)); + } + + /** + * Attempts to find a class by name. 
+ * + * @param className the name of the class + * @return the class, if found + */ + @Nullable Class findClass(String className); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/CombinedClassFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/CombinedClassFinder.java new file mode 100644 index 00000000..ed63f36c --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/CombinedClassFinder.java @@ -0,0 +1,44 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util.classfinder; + +import org.checkerframework.checker.nullness.qual.Nullable; + +import java.util.List; + +class CombinedClassFinder implements ClassFinder { + private final List finders; + + CombinedClassFinder(List finders) { + this.finders = finders; + } + + @Override + public @Nullable Class findClass(String className) { + for (ClassFinder finder : this.finders) { + Class clazz = finder.findClass(className); + if (clazz != null) { + return clazz; + } + } + return null; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/FallbackClassFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/FallbackClassFinder.java new file mode 100644 index 00000000..dd3c9f00 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/FallbackClassFinder.java @@ -0,0 +1,40 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util.classfinder; + +import org.checkerframework.checker.nullness.qual.Nullable; + +/** + * Uses {@link Class#forName(String)} to find a class reference for given class names. 
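A short, hedged sketch of composing finders with the combining(...) factory shown above. The two lambdas stand in for real ClassFinder implementations, e.g. an instrumentation-backed finder followed by a Class.forName fallback:

import me.lucko.spark.common.util.classfinder.ClassFinder;

public class ClassFinderExample {
    public static void main(String[] args) {
        ClassFinder finder = ClassFinder.combining(
                className -> null, // simulates a finder that misses
                className -> {
                    try {
                        return Class.forName(className);
                    } catch (Throwable e) {
                        return null;
                    }
                });
        // the combined finder returns the first non-null result, in order
        System.out.println(finder.findClass("java.lang.String")); // class java.lang.String
    }
}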
+ */
+public enum FallbackClassFinder implements ClassFinder {
+    INSTANCE;
+
+    @Override
+    public @Nullable Class<?> findClass(String className) {
+        try {
+            return Class.forName(className);
+        } catch (Throwable e) {
+            return null;
+        }
+    }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java
similarity index 61%
rename from spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java
rename to spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java
index f132613c..5f06d649 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java
@@ -18,32 +18,46 @@
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */
 
-package me.lucko.spark.common.util;
+package me.lucko.spark.common.util.classfinder;
 
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.util.JavaVersion;
 import net.bytebuddy.agent.ByteBuddyAgent;
-
 import org.checkerframework.checker.nullness.qual.Nullable;
 
 import java.lang.instrument.Instrumentation;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.logging.Level;
 
 /**
  * Uses {@link Instrumentation} to find a class reference for given class names.
  *
  * <p>This is necessary as we don't always have access to the classloader for a given class.</p>
  */
-public class ClassFinder {
+public class InstrumentationClassFinder implements ClassFinder {
 
-    private final Map<String, Class<?>> classes = new HashMap<>();
+    private static boolean warned = false;
 
-    public ClassFinder() {
-        Instrumentation instrumentation;
+    private static Instrumentation loadInstrumentation(SparkPlugin plugin) {
+        Instrumentation instrumentation = null;
         try {
             instrumentation = ByteBuddyAgent.install();
+            if (!warned && JavaVersion.getJavaVersion() >= 21) {
+                warned = true;
+                plugin.log(Level.INFO, "If you see a warning above that says \"WARNING: A Java agent has been loaded dynamically\", it can be safely ignored.");
+                plugin.log(Level.INFO, "See here for more information: https://spark.lucko.me/docs/misc/Java-agent-warning");
+            }
         } catch (Exception e) {
-            return;
+            // ignored
         }
+        return instrumentation;
+    }
+
+    private final Map<String, Class<?>> classes = new HashMap<>();
+
+    public InstrumentationClassFinder(SparkPlugin plugin) {
+        Instrumentation instrumentation = loadInstrumentation(plugin);
         if (instrumentation == null) {
             return;
         }
@@ -54,21 +68,9 @@ public ClassFinder() {
         }
     }
 
+    @Override
     public @Nullable Class<?> findClass(String className) {
-        // try instrumentation
-        Class<?> clazz = this.classes.get(className);
-        if (clazz != null) {
-            return clazz;
-        }
-
-        // try Class.forName
-        try {
-            return Class.forName(className);
-        } catch (Throwable e) {
-            // ignore
-        }
-
-        return null;
+        return this.classes.get(className);
    }
 
 }
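With the Class.forName() logic moved out into FallbackClassFinder, the two finders compose naturally via ClassFinder.combining(...). A sketch of one plausible wiring, assuming a SparkPlugin instance is in hand; the createClassFinder method name here is illustrative, not something this patch defines:

    import me.lucko.spark.common.SparkPlugin;
    import me.lucko.spark.common.util.classfinder.ClassFinder;
    import me.lucko.spark.common.util.classfinder.FallbackClassFinder;
    import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder;

    public class ClassFinderWiring {
        // instrumentation-sourced classes are consulted first;
        // Class.forName() is only used if that lookup misses
        static ClassFinder createClassFinder(SparkPlugin plugin) {
            return ClassFinder.combining(
                    new InstrumentationClassFinder(plugin),
                    FallbackClassFinder.INSTANCE
            );
        }
    }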
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java
new file mode 100644
index 00000000..ff7388ae
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java
@@ -0,0 +1,132 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util.config;
+
+import com.google.common.collect.ImmutableList;
+
+import java.util.Collections;
+import java.util.List;
+
+class CombinedConfiguration implements Configuration {
+
+    private final List<Configuration> configurations;
+
+    CombinedConfiguration(Configuration... configurations) {
+        this.configurations = ImmutableList.copyOf(configurations).reverse();
+    }
+
+    @Override
+    public void load() {
+        for (Configuration configuration : this.configurations) {
+            configuration.load();
+        }
+    }
+
+    @Override
+    public void save() {
+        for (Configuration configuration : this.configurations) {
+            configuration.save();
+        }
+    }
+
+    @Override
+    public String getString(String path, String def) {
+        String result = def;
+        for (Configuration configuration : this.configurations) {
+            result = configuration.getString(path, result);
+        }
+        return result;
+    }
+
+    @Override
+    public boolean getBoolean(String path, boolean def) {
+        boolean result = def;
+        for (Configuration configuration : this.configurations) {
+            result = configuration.getBoolean(path, result);
+        }
+        return result;
+    }
+
+    @Override
+    public int getInteger(String path, int def) {
+        int result = def;
+        for (Configuration configuration : this.configurations) {
+            result = configuration.getInteger(path, result);
+        }
+        return result;
+    }
+
+    @Override
+    public List<String> getStringList(String path) {
+        for (Configuration configuration : this.configurations) {
+            List<String> result = configuration.getStringList(path);
+            if (!result.isEmpty()) {
+                return result;
+            }
+        }
+        return Collections.emptyList();
+    }
+
+    @Override
+    public void setString(String path, String value) {
+        for (Configuration configuration : this.configurations) {
+            configuration.setString(path, value);
+        }
+    }
+
+    @Override
+    public void setBoolean(String path, boolean value) {
+        for (Configuration configuration : this.configurations) {
+            configuration.setBoolean(path, value);
+        }
+    }
+
+    @Override
+    public void setInteger(String path, int value) {
+        for (Configuration configuration : this.configurations) {
+            configuration.setInteger(path, value);
+        }
+    }
+
+    @Override
+    public void setStringList(String path, List<String> value) {
+        for (Configuration configuration : this.configurations) {
+            configuration.setStringList(path, value);
+        }
+    }
+
+    @Override
+    public boolean contains(String path) {
+        for (Configuration configuration : this.configurations) {
+            if (configuration.contains(path)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public void remove(String path) {
+        for (Configuration configuration : this.configurations) {
+            configuration.remove(path);
+        }
+    }
+}
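Note the precedence: the constructor reverses the array and every scalar getter re-queries each source in that reversed order, so a configuration passed earlier in the varargs overrides one passed later. A small sketch of the intended layering under that reading (the key name is hypothetical, used purely for illustration):

    import me.lucko.spark.common.util.config.Configuration;
    import me.lucko.spark.common.util.config.FileConfiguration;
    import me.lucko.spark.common.util.config.RuntimeConfiguration;

    import java.nio.file.Paths;

    public class ConfigLayeringExample {
        public static void main(String[] args) {
            // earlier arguments win: a "spark.*" system property or SPARK_* environment
            // variable overrides whatever is stored in config.json
            Configuration config = Configuration.combining(
                    RuntimeConfiguration.SYSTEM_PROPERTIES,
                    RuntimeConfiguration.ENVIRONMENT_VARIABLES,
                    new FileConfiguration(Paths.get("config.json"))
            );

            // "backgroundProfiler" is a hypothetical key
            System.out.println(config.getBoolean("backgroundProfiler", true));
        }
    }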
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java
new file mode 100644
index 00000000..c2c2d88b
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java
@@ -0,0 +1,54 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util.config;
+
+import java.util.List;
+
+public interface Configuration {
+
+    static Configuration combining(Configuration... configurations) {
+        return new CombinedConfiguration(configurations);
+    }
+
+    void load();
+
+    void save();
+
+    String getString(String path, String def);
+
+    boolean getBoolean(String path, boolean def);
+
+    int getInteger(String path, int def);
+
+    List<String> getStringList(String path);
+
+    void setString(String path, String value);
+
+    void setBoolean(String path, boolean value);
+
+    void setInteger(String path, int value);
+
+    void setStringList(String path, List<String> value);
+
+    boolean contains(String path);
+
+    void remove(String path);
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java
similarity index 93%
rename from spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
rename to spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java
index 586a8456..72a4681f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java
@@ -18,7 +18,7 @@
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */
 
-package me.lucko.spark.common.util;
+package me.lucko.spark.common.util.config;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -37,17 +37,18 @@
 import java.util.Collections;
 import java.util.List;
 
-public final class Configuration {
+public class FileConfiguration implements Configuration {
     private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
 
     private final Path file;
     private JsonObject root;
 
-    public Configuration(Path file) {
+    public FileConfiguration(Path file) {
         this.file = file;
         load();
     }
 
+    @Override
     public void load() {
         JsonObject root = null;
         if (Files.exists(this.file)) {
@@ -64,6 +65,7 @@ public void load() {
         this.root = root;
     }
 
+    @Override
     public void save() {
         try {
             Files.createDirectories(this.file.getParent());
@@ -78,6 +80,7 @@ public void save() {
         }
     }
 
+    @Override
     public String getString(String path, String def) {
         JsonElement el = this.root.get(path);
         if (el == null || !el.isJsonPrimitive()) {
@@ -87,6 +90,7 @@ public String getString(String path, String def) {
         return el.getAsJsonPrimitive().getAsString();
     }
 
+    @Override
     public boolean getBoolean(String path, boolean def) {
         JsonElement el = this.root.get(path);
         if (el == null || !el.isJsonPrimitive()) {
@@ -97,6 +101,7 @@ public boolean getBoolean(String path, boolean def) {
         JsonPrimitive val = el.getAsJsonPrimitive();
         return val.isBoolean() ? val.getAsBoolean() : def;
     }
 
+    @Override
     public int getInteger(String path, int def) {
         JsonElement el = this.root.get(path);
         if (el == null || !el.isJsonPrimitive()) {
@@ -107,6 +112,7 @@ public int getInteger(String path, int def) {
         JsonPrimitive val = el.getAsJsonPrimitive();
         return val.isNumber() ? val.getAsInt() : def;
     }
 
+    @Override
     public List<String> getStringList(String path) {
         JsonElement el = this.root.get(path);
         if (el == null || !el.isJsonArray()) {
@@ -122,18 +128,22 @@ public List<String> getStringList(String path) {
         return list;
     }
 
+    @Override
     public void setString(String path, String value) {
         this.root.add(path, new JsonPrimitive(value));
     }
 
+    @Override
     public void setBoolean(String path, boolean value) {
         this.root.add(path, new JsonPrimitive(value));
     }
 
+    @Override
     public void setInteger(String path, int value) {
         this.root.add(path, new JsonPrimitive(value));
     }
 
+    @Override
     public void setStringList(String path, List<String> value) {
         JsonArray array = new JsonArray();
         for (String str : value) {
@@ -142,10 +152,12 @@ public void setStringList(String path, List<String> value) {
         this.root.add(path, array);
     }
 
+    @Override
     public boolean contains(String path) {
         return this.root.has(path);
     }
 
+    @Override
     public void remove(String path) {
         this.root.remove(path);
     }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java
new file mode 100644
index 00000000..d0765547
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java
@@ -0,0 +1,106 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util.config;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+public enum RuntimeConfiguration implements Configuration {
+    SYSTEM_PROPERTIES {
+        @Override
+        public String getString(String path, String def) {
+            return System.getProperty("spark." + path, def);
+        }
+    },
+
+    ENVIRONMENT_VARIABLES {
+        @Override
+        public String getString(String path, String def) {
+            String name = "SPARK_" + path.replace(".", "_").replace("-", "_").toUpperCase();
+            String value = System.getenv(name);
+            return value != null ? value : def;
+        }
+    };
+
+    @Override
+    public boolean getBoolean(String path, boolean def) {
+        return Boolean.parseBoolean(getString(path, Boolean.toString(def)));
+    }
+
+    @Override
+    public int getInteger(String path, int def) {
+        try {
+            return Integer.parseInt(getString(path, Integer.toString(def)));
+        } catch (NumberFormatException e) {
+            return def;
+        }
+    }
+
+    @Override
+    public List<String> getStringList(String path) {
+        String value = getString(path, "");
+        if (value.isEmpty()) {
+            return Collections.emptyList();
+        }
+        return Arrays.asList(value.split(","));
+    }
+
+    @Override
+    public boolean contains(String path) {
+        return getString(path, null) != null;
+    }
+
+    @Override
+    public void load() {
+        // no-op
+    }
+
+    @Override
+    public void save() {
+        // no-op
+    }
+
+    @Override
+    public void setString(String path, String value) {
+        // no-op
+    }
+
+    @Override
+    public void setBoolean(String path, boolean value) {
+        // no-op
+    }
+
+    @Override
+    public void setInteger(String path, int value) {
+        // no-op
+    }
+
+    @Override
+    public void setStringList(String path, List<String> value) {
+        // no-op
+    }
+
+    @Override
+    public void remove(String path) {
+        // no-op
+    }
+}
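Both runtime sources resolve the same logical key from different namespaces: system properties get a "spark." prefix, while environment variable names are upper-cased with dots and dashes folded to underscores. A tiny sketch of that mapping, using a made-up key:

    public class RuntimeConfigKeyExample {
        public static void main(String[] args) {
            String path = "backgroundProfiler.engine"; // hypothetical key

            // system property lookup, e.g. -Dspark.backgroundProfiler.engine=java
            String propertyName = "spark." + path;

            // environment variable lookup, e.g. SPARK_BACKGROUNDPROFILER_ENGINE=java
            String envName = "SPARK_" + path.replace(".", "_").replace("-", "_").toUpperCase();

            System.out.println(propertyName + " / " + envName);
        }
    }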
diff --git a/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java b/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java
index 1605a385..0d825145 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java
@@ -20,7 +20,7 @@
 
 package me.lucko.spark.common.ws;
 
-import me.lucko.spark.common.util.Configuration;
+import me.lucko.spark.common.util.config.Configuration;
 
 import java.security.KeyPair;
 import java.security.PrivateKey;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java
index 6a9c2b74..0356b7dc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java
@@ -21,7 +21,6 @@
 package me.lucko.spark.common.ws;
 
 import com.google.protobuf.ByteString;
-
 import me.lucko.bytesocks.client.BytesocksClient;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.sampler.AbstractSampler;
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index 20044155..f8d79889 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -11,6 +11,7 @@ message PlatformMetadata {
   string version = 3;
   string minecraft_version = 4; // optional
   int32 spark_version = 7;
+  string brand = 8;
 
   // replaced
   reserved 5, 6;
@@ -32,6 +33,7 @@ message SystemStatistics {
   Java java = 6;
   int64 uptime = 7;
   map<string, NetInterface> net = 8;
+  Jvm jvm = 9;
 
   message Cpu {
     int32 threads = 1;
@@ -79,6 +81,12 @@ message SystemStatistics {
     string vm_args = 4;
   }
 
+  message Jvm {
+    string name = 1;
+    string vendor = 2;
+    string version = 3;
+  }
+
   message NetInterface {
     RollingAverageValues rx_bytes_per_second = 1;
     RollingAverageValues tx_bytes_per_second = 2;
@@ -96,13 +104,24 @@ message PlatformStatistics {
   Ping ping = 6; // optional
   int64 player_count = 7; // optional
   WorldStatistics world = 8; // optional
+  OnlineMode online_mode = 9; // optional
 
   message Memory {
-    MemoryPool heap = 1;
+    MemoryUsage heap = 1;
+    MemoryUsage non_heap = 2;
+    repeated MemoryPool pools = 3;
 
     message MemoryPool {
+      string name = 1;
+      MemoryUsage usage = 2;
+      MemoryUsage collection_usage = 3;
+    }
+
+    message MemoryUsage {
       int64 used = 1;
-      int64 total = 2;
+      int64 committed = 2; // previously called 'total'
+      int64 init = 3; // optional
+      int64 max = 4; // optional
     }
   }
 
@@ -126,12 +145,20 @@ message PlatformStatistics {
   message Ping {
     RollingAverageValues last15m = 1;
   }
+
+  enum OnlineMode {
+    UNKNOWN = 0;
+    OFFLINE = 1;
+    ONLINE = 2;
+  }
 }
 
 message WorldStatistics {
   int32 total_entities = 1;
   map<string, int32> entity_counts = 2;
   repeated World worlds = 3;
+  repeated GameRule game_rules = 4;
+  repeated DataPack data_packs = 5;
 
   message World {
     string name = 1;
@@ -150,6 +177,18 @@ message WorldStatistics {
     int32 total_entities = 3;
     map<string, int32> entity_counts = 4;
   }
+
+  message GameRule {
+    string name = 1;
+    string default_value = 2;
+    map<string, string> world_values = 3;
+  }
+
+  message DataPack {
+    string name = 1;
+    string description = 2;
+    string source = 3;
+  }
 }
 
 message WindowStatistics {
@@ -190,3 +229,26 @@ message CommandSenderMetadata {
     PLAYER = 1;
   }
 }
+
+message PluginOrModMetadata {
+  string name = 1;
+  string version = 2;
+  string author = 3;
+  string description = 4;
+}
+
+message HealthData {
+  HealthMetadata metadata = 1;
+  map<int32, WindowStatistics> time_window_statistics = 2;
+}
+
+message HealthMetadata {
+  CommandSenderMetadata creator = 1;
+  PlatformMetadata platform_metadata = 2;
+  PlatformStatistics platform_statistics = 3;
+  SystemStatistics system_statistics = 4;
+  int64 generated_time = 5;
+  map<string, string> server_configurations = 6;
+  map<string, PluginOrModMetadata> sources = 7;
+  map<string, string> extra_platform_metadata = 8;
+}
diff --git a/spark-common/src/main/proto/spark/spark_heap.proto b/spark-common/src/main/proto/spark/spark_heap.proto
index 59f2b854..aef7888b 100644
--- a/spark-common/src/main/proto/spark/spark_heap.proto
+++ b/spark-common/src/main/proto/spark/spark_heap.proto
@@ -17,6 +17,10 @@ message HeapMetadata {
   PlatformMetadata platform_metadata = 2;
   PlatformStatistics platform_statistics = 3;
   SystemStatistics system_statistics = 4;
+  int64 generated_time = 5;
+  map<string, string> server_configurations = 6;
+  map<string, PluginOrModMetadata> sources = 7;
+  map<string, string> extra_platform_metadata = 8;
 }
 
 message HeapEntry {
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
index dbc336a7..bd48e7d3 100644
--- a/spark-common/src/main/proto/spark/spark_sampler.proto
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -31,9 +31,10 @@ message SamplerMetadata {
   map<string, string> server_configurations = 10;
   int64 end_time = 11;
   int32 number_of_ticks = 12;
-  map<string, SourceMetadata> sources = 13;
+  map<string, PluginOrModMetadata> sources = 13;
   map<string, string> extra_platform_metadata = 14;
   SamplerMode sampler_mode = 15;
+  SamplerEngine sampler_engine = 16;
 
   message ThreadDumper {
     Type type = 1;
@@ -65,15 +66,15 @@ message SamplerMetadata {
     }
   }
 
-  message SourceMetadata {
-    string name = 1;
-    string version = 2;
-  }
-
   enum SamplerMode {
     EXECUTION = 0;
     ALLOCATION = 1;
   }
+
+  enum SamplerEngine {
+    JAVA = 0;
+    ASYNC = 1;
+  }
 }
 
 message ThreadNode {
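The reshaped Memory message mirrors java.lang.management.MemoryUsage, so the new fields can plausibly be filled straight from the MemoryMXBean. A sketch under that assumption; the builder accessors follow protoc's standard Java codegen for the fields above, not an API this patch spells out:

    import me.lucko.spark.proto.SparkProtos;

    import java.lang.management.ManagementFactory;
    import java.lang.management.MemoryUsage;

    public class MemoryUsageProtoExample {
        static SparkProtos.PlatformStatistics.Memory.MemoryUsage toProto(MemoryUsage usage) {
            return SparkProtos.PlatformStatistics.Memory.MemoryUsage.newBuilder()
                    .setUsed(usage.getUsed())
                    .setCommitted(usage.getCommitted()) // previously called 'total'
                    .setInit(Math.max(0, usage.getInit())) // getInit() returns -1 if undefined
                    .setMax(Math.max(0, usage.getMax()))   // getMax() returns -1 if undefined
                    .build();
        }

        public static void main(String[] args) {
            System.out.println(toProto(ManagementFactory.getMemoryMXBean().getHeapMemoryUsage()));
        }
    }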
diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so
similarity index 100%
rename from spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
rename to spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so
diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so
similarity index 100%
rename from spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so
rename to spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so
diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so
similarity index 100%
rename from spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
rename to spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so
diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so
similarity index 100%
rename from spark-common/src/main/resources/spark/macos/libasyncProfiler.so
rename to spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so
diff --git a/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java b/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java
new file mode 100644
index 00000000..fb2ae5e4
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java
@@ -0,0 +1,107 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common;
+
+import com.google.common.collect.ImmutableSet;
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
+import net.kyori.adventure.text.Component;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.nio.file.Path;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.UUID;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class SparkPlatformTest {
+
+    @Test
+    public void testEnableDisable(@TempDir Path directory) {
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            assertTrue(plugin.platform().hasEnabled());
+        }
+    }
+
+    @Test
+    public void testPermissions(@TempDir Path directory) {
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            SparkPlatform platform = plugin.platform();
+
+            Set<String> permissions = platform.getAllSparkPermissions();
+            assertEquals(
+                    ImmutableSet.of(
+                            "spark",
+                            "spark.profiler",
+                            "spark.tps",
+                            "spark.ping",
+                            "spark.healthreport",
+                            "spark.tickmonitor",
+                            "spark.gc",
+                            "spark.gcmonitor",
+                            "spark.heapsummary",
+                            "spark.heapdump",
+                            "spark.activity"
+                    ),
+                    permissions
+            );
+
+            TestCommandSender testSender = new TestCommandSender();
+            assertFalse(platform.hasPermissionForAnyCommand(testSender));
+
+            testSender.permissions.add("spark.tps");
+            assertTrue(platform.hasPermissionForAnyCommand(testSender));
+
+            testSender.permissions.clear();
+            testSender.permissions.add("spark");
+            assertTrue(platform.hasPermissionForAnyCommand(testSender));
+        }
+    }
+
+    private static final class TestCommandSender implements CommandSender {
+        private final Set<String> permissions = new HashSet<>();
+
+        @Override
+        public String getName() {
+            return "Test";
+        }
+
+        @Override
+        public UUID getUniqueId() {
+            return new UUID(0, 0);
+        }
+
+        @Override
+        public void sendMessage(Component message) {
+
+        }
+
+        @Override
+        public boolean hasPermission(String permission) {
+            return this.permissions.contains(permission);
+        }
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityLogTest.java b/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityLogTest.java
new file mode 100644
index 00000000..a94f954a
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityLogTest.java
@@ -0,0 +1,54 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.activitylog;
+
+import me.lucko.spark.common.command.sender.CommandSender;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.nio.file.Path;
+import java.util.UUID;
+import java.util.stream.Collectors;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class ActivityLogTest {
+
+    private static final CommandSender.Data USER = new CommandSender.Data("Test", UUID.fromString("5937921d-7051-45e1-bac7-3bbfdc12444f"));
+
+    @Test
+    public void testSaveLoad(@TempDir Path tempDir) {
+        ActivityLog log = new ActivityLog(tempDir.resolve("activity-log.json"));
+        log.addToLog(Activity.fileActivity(USER, 1721937782184L, "Profiler", "path/to/profile.sparkprofile"));
+        log.addToLog(Activity.urlActivity(USER, 1721937782184L, "Profiler", "https://spark.lucko.me/abcd"));
+        log.save();
+
+        ActivityLog log2 = new ActivityLog(tempDir.resolve("activity-log.json"));
+        log2.load();
+
+        // check the log contents
+        assertEquals(
+                log.getLog().stream().map(Activity::serialize).collect(Collectors.toList()),
+                log2.getLog().stream().map(Activity::serialize).collect(Collectors.toList())
+        );
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityTest.java b/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityTest.java
new file mode 100644
index 00000000..5bf88f81
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/activitylog/ActivityTest.java
@@ -0,0 +1,78 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.activitylog;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import me.lucko.spark.common.command.sender.CommandSender;
+import org.junit.jupiter.api.Test;
+
+import java.util.UUID;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class ActivityTest {
+    private static final Gson GSON = new Gson();
+
+    private static final CommandSender.Data USER = new CommandSender.Data("Test", UUID.fromString("5937921d-7051-45e1-bac7-3bbfdc12444f"));
+
+    private static final String FILE_ACTIVITY_JSON = "{\"user\":{\"type\":\"player\",\"name\":\"Test\",\"uniqueId\":\"5937921d-7051-45e1-bac7-3bbfdc12444f\"},\"time\":1721937782184,\"type\":\"Profiler\",\"data\":{\"type\":\"file\",\"value\":\"path/to/profile.sparkprofile\"}}";
+    private static final String URL_ACTIVITY_JSON = "{\"user\":{\"type\":\"player\",\"name\":\"Test\",\"uniqueId\":\"5937921d-7051-45e1-bac7-3bbfdc12444f\"},\"time\":1721937782184,\"type\":\"Profiler\",\"data\":{\"type\":\"url\",\"value\":\"https://spark.lucko.me/abcd\"}}";
+
+    @Test
+    public void testSerialize() {
+        Activity fileActivity = Activity.fileActivity(
+                USER,
+                1721937782184L,
+                "Profiler",
+                "path/to/profile.sparkprofile"
+        );
+        assertEquals(FILE_ACTIVITY_JSON, GSON.toJson(fileActivity.serialize()));
+
+        Activity urlActivity = Activity.urlActivity(
+                USER,
+                1721937782184L,
+                "Profiler",
+                "https://spark.lucko.me/abcd"
+        );
+        assertEquals(URL_ACTIVITY_JSON, GSON.toJson(urlActivity.serialize()));
+    }
+
+    @Test
+    public void testDeserialize() {
+        Activity fileActivity = Activity.deserialize(GSON.fromJson(FILE_ACTIVITY_JSON, JsonElement.class));
+        assertEquals(USER.getUniqueId(), fileActivity.getUser().getUniqueId());
+        assertEquals(USER.getName(), fileActivity.getUser().getName());
+        assertEquals(1721937782184L, fileActivity.getTime());
+        assertEquals("Profiler", fileActivity.getType());
+        assertEquals(Activity.DATA_TYPE_FILE, fileActivity.getDataType());
+        assertEquals("path/to/profile.sparkprofile", fileActivity.getDataValue());
+
+        Activity urlActivity = Activity.deserialize(GSON.fromJson(URL_ACTIVITY_JSON, JsonElement.class));
+        assertEquals(USER.getUniqueId(), urlActivity.getUser().getUniqueId());
+        assertEquals(USER.getName(), urlActivity.getUser().getName());
+        assertEquals(1721937782184L, urlActivity.getTime());
+        assertEquals("Profiler", urlActivity.getType());
+        assertEquals(Activity.DATA_TYPE_URL, urlActivity.getDataType());
+        assertEquals("https://spark.lucko.me/abcd", urlActivity.getDataValue());
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java b/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java
new file mode 100644
index 00000000..dda6c5d4
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java
@@ -0,0 +1,137 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command;
+
+import com.google.common.collect.ImmutableList;
+import org.junit.jupiter.api.Test;
+
+import java.util.Set;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrowsExactly;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class ArgumentsTest {
+
+    @Test
+    public void testInitialParse() {
+        Arguments arguments = new Arguments(ImmutableList.of("hello"), true);
+        assertEquals("hello", arguments.subCommand());
+
+        Arguments.ParseException exception = assertThrowsExactly(
+                Arguments.ParseException.class,
+                () -> new Arguments(ImmutableList.of("hello"), false)
+        );
+        assertEquals("Expected flag at position 0 but got 'hello' instead!", exception.getMessage());
+
+        exception = assertThrowsExactly(
+                Arguments.ParseException.class,
+                () -> new Arguments(ImmutableList.of("hello", "world"), true)
+        );
+        assertEquals("Expected flag at position 1 but got 'world' instead!", exception.getMessage());
+    }
+
+    @Test
+    public void testStringFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false);
+
+        Set<String> values = arguments.stringFlag("test-flag");
+        assertEquals(1, values.size());
+        assertEquals("hello", values.iterator().next());
+    }
+
+    @Test
+    public void testStringFlagWithSpace() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello", "world"), false);
+
+        Set<String> values = arguments.stringFlag("test-flag");
+        assertEquals(1, values.size());
+        assertEquals("hello world", values.iterator().next());
+    }
+
+    @Test
+    public void testStringFlagWithMultipleValues() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello", "--test-flag", "world"), false);
+
+        Set<String> values = arguments.stringFlag("test-flag");
+        assertEquals(2, values.size());
+        assertEquals(ImmutableList.of("hello", "world"), ImmutableList.copyOf(values));
+    }
+
+    @Test
+    public void testMissingStringFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false);
+
+        Set<String> values = arguments.stringFlag("missing-flag");
+        assertEquals(0, values.size());
+    }
+
+    @Test
+    public void testIntFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "123", "--negative-test", "-100"), false);
+
+        int value = arguments.intFlag("test-flag");
+        assertEquals(123, value);
+
+        value = arguments.intFlag("negative-test");
+        assertEquals(100, value);
+    }
+
+    @Test
+    public void testMissingIntFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false);
+
+        int value = arguments.intFlag("missing-flag");
+        assertEquals(-1, value);
+    }
+
+    @Test
+    public void testDoubleFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "123.45", "--negative-test", "-100.5"), false);
+
+        double value = arguments.doubleFlag("test-flag");
+        assertEquals(123.45, value, 0.0001);
+
+        value = arguments.doubleFlag("negative-test");
+        assertEquals(100.5, value, 0.0001);
+    }
+
+    @Test
+    public void testMissingDoubleFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false);
+
+        double value = arguments.doubleFlag("missing-flag");
+        assertEquals(-1, value);
+    }
+
+    @Test
+    public void testBooleanFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag"), false);
+
+        boolean value = arguments.boolFlag("test-flag");
+        assertTrue(value);
+
+        value = arguments.boolFlag("negative-test");
+        assertFalse(value);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java
new file mode 100644
index 00000000..b2bb384e
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java
@@ -0,0 +1,63 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.heapdump;
+
+import me.lucko.spark.proto.SparkHeapProtos;
+import me.lucko.spark.test.TestClass;
+import me.lucko.spark.test.plugin.TestCommandSender;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.nio.file.Path;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class HeapDumpSummaryTest {
+
+    @Test
+    public void testHeapDumpSummary(@TempDir Path directory) throws Exception {
+        TestClass testClass1 = new TestClass();
+        TestClass testClass2 = new TestClass();
+
+        HeapDumpSummary dump = HeapDumpSummary.createNew();
+        List<HeapDumpSummary.Entry> entries = dump.getEntries();
+
+        HeapDumpSummary.Entry thisClassEntry = entries.stream().filter(entry -> entry.getType().equals(TestClass.class.getName())).findAny().orElse(null);
+        assertNotNull(thisClassEntry);
+        assertEquals(2, thisClassEntry.getInstances());
+        assertEquals(32, thisClassEntry.getBytes());
+
+        SparkHeapProtos.HeapData proto;
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            proto = dump.toProto(plugin.platform(), TestCommandSender.INSTANCE.toData());
+        }
+        assertNotNull(proto);
+
+        SparkHeapProtos.HeapEntry protoEntry = proto.getEntriesList().stream().filter(entry -> entry.getType().equals(TestClass.class.getName())).findAny().orElse(null);
+        assertNotNull(protoEntry);
+        assertEquals(2, protoEntry.getInstances());
+        assertEquals(32, protoEntry.getSize());
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpTest.java b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpTest.java
new file mode 100644
index 00000000..5df5c5d9
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpTest.java
@@ -0,0 +1,41 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.heapdump;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class HeapDumpTest {
+
+    @Test
+    public void testHeapDump(@TempDir Path tempDir) throws Exception {
+        Path file = tempDir.resolve("heapdump.hprof");
+        HeapDump.dumpHeap(file, false);
+        assertTrue(Files.exists(file));
+        Files.delete(file);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuInfoTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuInfoTest.java
new file mode 100644
index 00000000..047e80dd
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuInfoTest.java
@@ -0,0 +1,38 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.cpu;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class CpuInfoTest {
+
+    @Test
+    public void testCpuInfo() {
+        String model = CpuInfo.queryCpuModel();
+        assertNotNull(model);
+        assertFalse(model.isEmpty());
+        System.out.println(model);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuMonitorTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuMonitorTest.java
new file mode 100644
index 00000000..d554976e
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/cpu/CpuMonitorTest.java
@@ -0,0 +1,35 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.cpu;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class CpuMonitorTest {
+
+    @Test
+    public void testCpuLoad() {
+        assertTrue(CpuMonitor.processLoad() >= 0);
+        assertTrue(CpuMonitor.systemLoad() >= 0);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/disk/DiskUsageTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/disk/DiskUsageTest.java
new file mode 100644
index 00000000..d961b2fc
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/disk/DiskUsageTest.java
@@ -0,0 +1,35 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.disk;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class DiskUsageTest {
+
+    @Test
+    public void testDiskUsage() {
+        assertTrue(DiskUsage.getUsed() > 0);
+        assertTrue(DiskUsage.getTotal() > 0);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/memory/MemoryInfoTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/memory/MemoryInfoTest.java
new file mode 100644
index 00000000..5ae8fdc0
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/memory/MemoryInfoTest.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.memory;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class MemoryInfoTest {
+
+    @Test
+    public void testMemoryInfo() {
+        assertTrue(MemoryInfo.getUsedPhysicalMemory() > 0);
+        assertTrue(MemoryInfo.getTotalPhysicalMemory() > 0);
+        assertTrue(MemoryInfo.getAvailablePhysicalMemory() > 0);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfoTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfoTest.java
new file mode 100644
index 00000000..6b50584c
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfoTest.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import com.google.common.collect.ImmutableSet;
+import org.junit.jupiter.api.Test;
+
+import java.util.Arrays;
+import java.util.Map;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class NetworkInterfaceInfoTest {
+
+    @Test
+    public void testLinuxProcParse() {
+        String input =
+                "Inter-| Receive | Transmit\n" +
+                " face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed\n" +
+                " lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0\n" +
+                " eth0: 1215645 2751 1 0 0 0 0 0 1782404 4324 2 0 0 427 0 0\n" +
+                " ppp0: 1622270 5552 1 0 0 0 0 0 354130 5669 0 0 0 0 0 0\n" +
+                " tap0: 7714 81 0 0 0 0 0 0 7714 81 0 0 0 0 0 0";
+
+        Map<String, NetworkInterfaceInfo> map = NetworkInterfaceInfo.read(Arrays.asList(input.split("\n")));
+        assertNotNull(map);
+        assertEquals(ImmutableSet.of("lo", "eth0", "ppp0", "tap0"), map.keySet());
+
+        NetworkInterfaceInfo eth0 = map.get("eth0");
+        assertEquals(1215645, eth0.getReceivedBytes());
+        assertEquals(2751, eth0.getReceivedPackets());
+        assertEquals(1, eth0.getReceiveErrors());
+        assertEquals(1782404, eth0.getTransmittedBytes());
+        assertEquals(4324, eth0.getTransmittedPackets());
+        assertEquals(2, eth0.getTransmitErrors());
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/monitor/os/OperatingSystemInfoTest.java b/spark-common/src/test/java/me/lucko/spark/common/monitor/os/OperatingSystemInfoTest.java
new file mode 100644
index 00000000..3e4fd132
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/monitor/os/OperatingSystemInfoTest.java
@@ -0,0 +1,42 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.os;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class OperatingSystemInfoTest {
+
+    @Test
+    public void testOperatingSystemInfo() {
+        OperatingSystemInfo info = OperatingSystemInfo.poll();
+        assertNotNull(info);
+        assertNotNull(info.name());
+        assertNotNull(info.version());
+        assertNotNull(info.arch());
+
+        System.out.println(info.name());
+        System.out.println(info.version());
+        System.out.println(info.arch());
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java
new file mode 100644
index 00000000..4b26322c
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java
@@ -0,0 +1,51 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform;
+
+import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.nio.file.Path;
+import java.util.Collections;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class PlatformStatisticsProviderTest {
+
+    @Test
+    public void testSystemStatistics(@TempDir Path directory) {
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            SparkProtos.SystemStatistics systemStatistics = new PlatformStatisticsProvider(plugin.platform()).getSystemStatistics();
+            assertNotNull(systemStatistics);
+        }
+    }
+
+    @Test
+    public void testPlatformStatistics(@TempDir Path directory) {
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            SparkProtos.PlatformStatistics platformStatistics = new PlatformStatisticsProvider(plugin.platform()).getPlatformStatistics(Collections.emptyMap(), true);
+            assertNotNull(platformStatistics);
+        }
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java
new file mode 100644
index 00000000..e2b83746
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java
@@ -0,0 +1,43 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform;
+
+import me.lucko.spark.test.plugin.TestCommandSender;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.nio.file.Path;
+import java.util.Collections;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class SparkMetadataTest {
+
+    @Test
+    public void testGather(@TempDir Path directory) {
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            SparkMetadata metadata = SparkMetadata.gather(plugin.platform(), TestCommandSender.INSTANCE.toData(), Collections.emptyMap());
+            assertNotNull(metadata);
+        }
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilterTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilterTest.java
new file mode 100644
index 00000000..ba6f958d
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilterTest.java
@@ -0,0 +1,106 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Maps;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonPrimitive;
+import org.junit.jupiter.api.Test;
+
+import java.util.Map;
+import java.util.Set;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class ExcludedConfigFilterTest {
+
+    @Test
+    public void testFilter() {
+        Set<String> excluded = ImmutableSet.<String>builder()
+                .add("database")
+                .add("settings.bungeecord-addresses")
+                .add("rcon.password")
+                .add("world-settings.*.feature-seeds")
+                .add("world-settings.*.seed-*")
+                .build();
+
+        ExcludedConfigFilter filter = new ExcludedConfigFilter(excluded);
+
+        JsonPrimitive value = new JsonPrimitive("hello");
+        JsonObject before = obj(
+                element("hello", value),
+                element("database", obj(
+                        element("username", value),
+                        element("password", value)
+                )),
+                element("settings", obj(
+                        element("hello", value),
+                        element("bungeecord-addresses", value)
+                )),
+                element("rcon.password", value),
+                element("world-settings", obj(
+                        element("world1", obj(
+                                element("hello", value),
+                                element("feature-seeds", value),
+                                element("seed-test", value)
+                        )),
+                        element("world2", obj(
+                                element("hello", value),
+                                element("feature-seeds", value),
+                                element("seed-test", value)
+                        ))
+                ))
+        );
+        JsonObject after = obj(
+                element("hello", value),
+                element("settings", obj(
+                        element("hello", value)
+                )),
+                element("world-settings", obj(
+                        element("world1", obj(
+                                element("hello", value)
+                        )),
+                        element("world2", obj(
+                                element("hello", value)
+                        ))
+                ))
+        );
+
+
+        assertEquals(after, filter.apply(before));
+    }
+
+    @SafeVarargs
+    private static JsonObject obj(Map.Entry<String, JsonElement>... elements) {
+        JsonObject object = new JsonObject();
+        for (Map.Entry<String, JsonElement> element : elements) {
+            object.add(element.getKey(), element.getValue());
+        }
+        return object;
+    }
+
+    private static Map.Entry<String, JsonElement> element(String key, JsonElement value) {
+        return Maps.immutableEntry(key, value);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java
new file mode 100644
index 00000000..2b686cac
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java
@@ -0,0 +1,50 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java
new file mode 100644
index 00000000..2b686cac
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParserTest.java
@@ -0,0 +1,50 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.common.collect.ImmutableMap;
+import org.junit.jupiter.api.Test;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Map;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class PropertiesConfigParserTest {
+
+    @Test
+    public void testParse() throws IOException {
+        String properties =
+                "hello=world\n" +
+                "a.b.c=1\n" +
+                "foo=true\n";
+
+        Map<String, Object> parse = PropertiesConfigParser.INSTANCE.parse(new BufferedReader(new StringReader(properties)));
+        assertEquals(ImmutableMap.of(
+                "hello", "world",
+                "a.b.c", 1L,
+                "foo", true
+        ), parse);
+    }
+
+}
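The expected map above also pins down the parser's type coercion: the raw strings "1" and "true" come back as a Long and a Boolean rather than as plain strings. A minimal sketch of such a coercion rule (a hypothetical helper; PropertiesConfigParser's actual logic may differ):

public class PropertyCoercionSketch {

    // Hypothetical coercion: booleans first, then longs, otherwise keep the string.
    static Object coerce(String value) {
        if (value.equalsIgnoreCase("true") || value.equalsIgnoreCase("false")) {
            return Boolean.parseBoolean(value);
        }
        try {
            return Long.parseLong(value);
        } catch (NumberFormatException e) {
            return value;
        }
    }

    public static void main(String[] args) {
        System.out.println(coerce("world")); // world (String)
        System.out.println(coerce("1"));     // 1 (Long)
        System.out.println(coerce("true"));  // true (Boolean)
    }
}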
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/world/CountMapTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/world/CountMapTest.java
new file mode 100644
index 00000000..8dcc8a95
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/world/CountMapTest.java
@@ -0,0 +1,49 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.HashMap;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class CountMapTest {
+
+    @Test
+    public void testSimple() {
+        CountMap.Simple<String> countMap = new CountMap.Simple<>(new HashMap<>());
+        assertTrue(countMap.asMap().isEmpty());
+
+        countMap.increment("test");
+        assertTrue(countMap.asMap().containsKey("test"));
+        assertEquals(1, countMap.asMap().get("test").get());
+
+        countMap.add("test", 5);
+        assertEquals(6, countMap.asMap().get("test").get());
+
+        countMap.increment("test2");
+
+        assertEquals(7, countMap.total().get());
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java
new file mode 100644
index 00000000..53fc7e65
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java
@@ -0,0 +1,95 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import me.lucko.spark.common.sampler.java.MergeStrategy;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.proto.SparkSamplerProtos;
+import me.lucko.spark.test.TestClass2;
+import me.lucko.spark.test.plugin.TestCommandSender;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
+import org.junit.jupiter.api.io.TempDir;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Locale;
+import java.util.concurrent.TimeUnit;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
+public class SamplerTest {
+
+    @ParameterizedTest
+    @EnumSource
+    public void testSampler(SamplerType samplerType, @TempDir Path directory) {
+        if (samplerType == SamplerType.ASYNC) {
+            String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
+            assumeTrue(os.equals("linux") || os.equals("macosx"), "async profiler is only supported on Linux and macOS");
+        }
+
+        Thread thread = new Thread(new TestClass2(), "Test Thread");
+        thread.start();
+
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            Sampler sampler = new SamplerBuilder()
+                    .threadDumper(new ThreadDumper.Specific(thread))
+                    .threadGrouper(ThreadGrouper.BY_POOL)
+                    .samplingInterval(10)
+                    .forceJavaSampler(samplerType == SamplerType.JAVA)
+                    .completeAfter(2, TimeUnit.SECONDS)
+                    .start(plugin.platform());
+
+            assertInstanceOf(samplerType.implClass(), sampler);
+            assertEquals(samplerType, sampler.getType());
+
+            assertNotEquals(-1, sampler.getAutoEndTime());
+            sampler.getFuture().join();
+
+            Sampler.ExportProps exportProps = new Sampler.ExportProps()
+                    .creator(TestCommandSender.INSTANCE.toData())
+                    .classSourceLookup(() -> ClassSourceLookup.create(plugin.platform()));
+
+            if (samplerType == SamplerType.JAVA) {
+                exportProps.mergeStrategy(MergeStrategy.SAME_METHOD);
+            }
+
+            SparkSamplerProtos.SamplerData proto = sampler.toProto(plugin.platform(), exportProps);
+            assertNotNull(proto);
+
+            List<SparkSamplerProtos.ThreadNode> threads = proto.getThreadsList();
+            assertEquals(1, threads.size());
+
+            SparkSamplerProtos.ThreadNode protoThread = threads.get(0);
+            assertEquals("Test Thread", protoThread.getName());
+            assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("test")));
+            assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("testA")));
+            assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("testB")));
+        }
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadDumperTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadDumperTest.java
new file mode 100644
index 00000000..b96f1493
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadDumperTest.java
@@ -0,0 +1,64 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.Set;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class ThreadDumperTest {
+
+    @Test
+    public void testAll() {
+        assertTrue(ThreadDumper.ALL.isThreadIncluded(1, "test"));
+        assertTrue(ThreadDumper.ALL.isThreadIncluded(2, "test2"));
+    }
+
+    @Test
+    public void testSpecific() {
+        Thread thread = new Thread(() -> {
+            try {
+                Thread.sleep(100_000);
+            } catch (InterruptedException e) {
+                // ignore
+            }
+        }, "test-thread-1");
+        thread.start();
+
+        ThreadDumper.Specific specific = new ThreadDumper.Specific(thread);
+
+        assertTrue(specific.isThreadIncluded(thread.getId(), "test-thread-1"));
+
+        Set<Thread> threads = specific.getThreads();
+        assertEquals(1, threads.size());
+        assertTrue(threads.contains(thread));
+
+        Set<String> threadNames = specific.getThreadNames();
+        assertEquals(1, threadNames.size());
+        assertTrue(threadNames.contains("test-thread-1"));
+
+        thread.interrupt();
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadGrouperTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadGrouperTest.java
new file mode 100644
index 00000000..5f4e5aeb
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/ThreadGrouperTest.java
@@ -0,0 +1,80 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */ + +package me.lucko.spark.common.sampler; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class ThreadGrouperTest { + + @Test + public void testByName() { + ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME.get(); + + String group = threadGrouper.getGroup(1, "main"); + assertEquals("main", group); + + String label = threadGrouper.getLabel("main"); + assertEquals("main", label); + } + + @Test + public void testAsOne() { + ThreadGrouper threadGrouper = ThreadGrouper.AS_ONE.get(); + + String group = threadGrouper.getGroup(1, "main"); + assertEquals("root", group); + + String label = threadGrouper.getLabel("root"); + assertEquals("All (x1)", label); + + group = threadGrouper.getGroup(2, "main2"); + assertEquals("root", group); + + label = threadGrouper.getLabel("root"); + assertEquals("All (x2)", label); + } + + @Test + public void testByPool() { + ThreadGrouper threadGrouper = ThreadGrouper.BY_POOL.get(); + + String group = threadGrouper.getGroup(1, "main"); + assertEquals("main", group); + + String label = threadGrouper.getLabel("main"); + assertEquals("main", label); + + group = threadGrouper.getGroup(2, "Test Pool - #1"); + assertEquals("Test Pool", group); + + label = threadGrouper.getLabel("Test Pool"); + assertEquals("Test Pool (x1)", label); + + group = threadGrouper.getGroup(3, "Test Pool - #2"); + assertEquals("Test Pool", group); + + label = threadGrouper.getLabel("Test Pool"); + assertEquals("Test Pool (x2)", label); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java new file mode 100644 index 00000000..52477ce4 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/node/NodeTest.java @@ -0,0 +1,196 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.common.sampler.node;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import me.lucko.spark.common.sampler.SamplerMode;
+import me.lucko.spark.common.sampler.async.AsyncNodeExporter;
+import me.lucko.spark.common.sampler.async.AsyncStackTraceElement;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+import me.lucko.spark.proto.SparkSamplerProtos;
+import org.junit.jupiter.api.Test;
+
+import java.util.Collection;
+import java.util.concurrent.TimeUnit;
+
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class NodeTest {
+
+    private static final StackTraceNode.Describer<AsyncStackTraceElement> STACK_TRACE_DESCRIBER = (element, parent) -> new StackTraceNode.AsyncDescription(element.getClassName(), element.getMethodName(), element.getMethodDescription());
+    private static final int WINDOW = 10;
+
+    private static final AsyncStackTraceElement NODE_0 = new AsyncStackTraceElement("java.lang.Thread", "run", "()V");
+    private static final AsyncStackTraceElement NODE_1_1 = new AsyncStackTraceElement("test.Foo", "run", "()V");
+    private static final AsyncStackTraceElement NODE_1_2_1 = new AsyncStackTraceElement("test.Foo", "example", "()V");
+    private static final AsyncStackTraceElement NODE_2_1 = new AsyncStackTraceElement("test.Bar", "run", "()V");
+    private static final AsyncStackTraceElement NODE_2_2_1 = new AsyncStackTraceElement("test.Bar", "example", "()V");
+
+    private static final AsyncStackTraceElement[] STACK_1 = {NODE_1_2_1, NODE_1_1, NODE_0};
+    private static final AsyncStackTraceElement[] STACK_2 = {NODE_2_2_1, NODE_2_1, NODE_0};
+
+    @Test
+    public void testThreadLabels() {
+        ThreadNode node = new ThreadNode("Test Thread");
+        assertEquals("Test Thread", node.getThreadGroup());
+        assertEquals("Test Thread", node.getThreadLabel());
+
+        node.setThreadLabel("Test");
+        assertEquals("Test", node.getThreadLabel());
+    }
+
+    @Test
+    public void testBasicLog() {
+        ThreadNode threadNode = new ThreadNode("Test Thread");
+        assertEquals(0, threadNode.getTimeWindows().size());
+
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_1, TimeUnit.SECONDS.toMicros(1), WINDOW);
+
+        Collection<StackTraceNode> children1 = threadNode.getChildren();
+        assertEquals(1, children1.size());
+        assertEquals(ImmutableSet.of(WINDOW), threadNode.getTimeWindows());
+
+        StackTraceNode node1 = children1.iterator().next();
+        assertEquals(ImmutableSet.of(WINDOW), node1.getTimeWindows());
+        assertEquals("java.lang.Thread", node1.getClassName());
+        assertEquals("run", node1.getMethodName());
+        assertEquals("()V", node1.getMethodDescription());
+        assertEquals(StackTraceNode.NULL_LINE_NUMBER, node1.getLineNumber());
+        assertEquals(StackTraceNode.NULL_LINE_NUMBER, node1.getParentLineNumber());
+        assertEquals(TimeUnit.SECONDS.toMicros(1), node1.getTimeAccumulator(WINDOW).longValue());
+
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_2, TimeUnit.SECONDS.toMicros(1), WINDOW);
+        assertEquals(TimeUnit.SECONDS.toMicros(2), node1.getTimeAccumulator(WINDOW).longValue());
+
+        Collection<StackTraceNode> children2 = node1.getChildren();
+        assertEquals(2, children2.size());
+
+        for (StackTraceNode node2 : children2) {
+            assertEquals(ImmutableSet.of(WINDOW), node2.getTimeWindows());
+            assertEquals(TimeUnit.SECONDS.toMicros(1), node2.getTimeAccumulator(WINDOW).longValue());
+        }
+    }
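+
+    // Note on the expected proto in the next test: the exporter flattens every
+    // stack trace node of the thread into one sorted children list, and encodes
+    // the tree shape via childrenRefs, which are indexes back into that flat
+    // list (the thread node itself holds the refs of its direct children).
+    // A sketch of resolving the refs back into a tree, using only the generated
+    // protobuf accessors exercised in this test:
+    //
+    //     for (int ref : proto.getChildrenRefsList()) {
+    //         visit(proto.getChildren(ref)); // then recurse into that node's own refs
+    //     }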
+
+    @Test
+    public void testExport() {
+        ThreadNode threadNode = new ThreadNode("Test Thread");
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_1, TimeUnit.SECONDS.toMicros(1), WINDOW);
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_1, TimeUnit.SECONDS.toMicros(1), WINDOW + 1);
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_2, TimeUnit.SECONDS.toMicros(1), WINDOW + 1);
+
+        ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(SamplerMode.EXECUTION.valueTransformer(), ImmutableList.of(threadNode));
+        int[] keys = timeEncoder.getKeys();
+        assertArrayEquals(new int[]{WINDOW, WINDOW + 1}, keys);
+
+        SparkSamplerProtos.ThreadNode proto = new AsyncNodeExporter(timeEncoder).export(threadNode);
+
+        SparkSamplerProtos.ThreadNode expected = SparkSamplerProtos.ThreadNode.newBuilder()
+                .setName("Test Thread")
+                .addTimes(1000)
+                .addTimes(2000)
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("test.Bar")
+                        .setMethodDesc("()V")
+                        .setMethodName("example")
+                        .addTimes(0)
+                        .addTimes(1000)
+                )
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("test.Bar")
+                        .setMethodDesc("()V")
+                        .setMethodName("run")
+                        .addTimes(0)
+                        .addTimes(1000)
+                        .addChildrenRefs(0)
+                )
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("test.Foo")
+                        .setMethodDesc("()V")
+                        .setMethodName("example")
+                        .addTimes(1000)
+                        .addTimes(1000)
+                )
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("test.Foo")
+                        .setMethodDesc("()V")
+                        .setMethodName("run")
+                        .addTimes(1000)
+                        .addTimes(1000)
+                        .addChildrenRefs(2)
+                )
+                .addChildren(SparkSamplerProtos.StackTraceNode.newBuilder()
+                        .setClassName("java.lang.Thread")
+                        .setMethodDesc("()V")
+                        .setMethodName("run")
+                        .addTimes(1000)
+                        .addTimes(2000)
+                        .addChildrenRefs(1)
+                        .addChildrenRefs(3)
+                )
+                .addChildrenRefs(4)
+                .build();
+
+        assertEquals(expected, proto);
+    }
+
+    @Test
+    public void testRemoveTimeWindows() {
+        ThreadNode threadNode = new ThreadNode("Test Thread");
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_1, TimeUnit.SECONDS.toMicros(1), WINDOW);
+        threadNode.log(STACK_TRACE_DESCRIBER, STACK_2, TimeUnit.SECONDS.toMicros(1), WINDOW + 1);
+
+        StackTraceNode threadRunNode = threadNode.getChildren().iterator().next();
+        Collection<StackTraceNode> fooBarNodes = threadRunNode.getChildren();
+
+        assertEquals(2, threadNode.getTimeWindows().size());
+        assertEquals(2, threadRunNode.getChildren().size());
+        assertEquals(2, threadRunNode.getTimeWindows().size());
+
+        for (StackTraceNode node : fooBarNodes) {
+            assertEquals(1, node.getTimeWindows().size());
+            assertEquals(1, node.getChildren().size());
+            assertEquals(1, node.getChildren().iterator().next().getTimeWindows().size());
+            assertEquals(0, node.getChildren().iterator().next().getChildren().size());
+        }
+
+        assertFalse(threadNode.removeTimeWindowsRecursively(w -> w == WINDOW));
+        assertEquals(1, threadNode.getTimeWindows().size());
+        assertEquals(1, threadRunNode.getChildren().size());
+        assertEquals(1, threadRunNode.getTimeWindows().size());
+
+        assertTrue(threadNode.removeTimeWindowsRecursively(w -> w == WINDOW + 1));
+        assertEquals(0, threadNode.getTimeWindows().size());
+        assertEquals(0, threadNode.getChildren().size());
+
+        // doesn't bother updating nested children that have been removed
+        for (StackTraceNode node : fooBarNodes) {
+            assertEquals(1, node.getTimeWindows().size());
+            assertEquals(1, node.getChildren().size());
+            assertEquals(1, node.getChildren().iterator().next().getTimeWindows().size());
+            assertEquals(0,
node.getChildren().iterator().next().getChildren().size()); + } + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtilsTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtilsTest.java new file mode 100644 index 00000000..4161fe99 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtilsTest.java @@ -0,0 +1,68 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.window; + +import org.junit.jupiter.api.Test; + +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.Month; +import java.time.ZoneOffset; +import java.util.concurrent.TimeUnit; +import java.util.function.IntPredicate; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ProfilingWindowUtilsTest { + + @Test + public void testMillisToWindow() { + int baseWindow = 28532770; + Instant baseTime = LocalDateTime.of(2024, Month.APRIL, 1, 10, 10, 0).toInstant(ZoneOffset.UTC); + + assertEquals(TimeUnit.MILLISECONDS.toMinutes(baseTime.toEpochMilli()), baseWindow); // should scale with unix time + + assertEquals(baseWindow, ProfilingWindowUtils.unixMillisToWindow(baseTime.toEpochMilli())); + assertEquals(baseWindow, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusMillis(1).toEpochMilli())); + assertEquals(baseWindow, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusSeconds(1).toEpochMilli())); + assertEquals(baseWindow, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusSeconds(59).toEpochMilli())); + assertEquals(baseWindow + 1, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusSeconds(60).toEpochMilli())); + assertEquals(baseWindow + 1, ProfilingWindowUtils.unixMillisToWindow(baseTime.plusSeconds(61).toEpochMilli())); + assertEquals(baseWindow - 1, ProfilingWindowUtils.unixMillisToWindow(baseTime.minusMillis(1).toEpochMilli())); + assertEquals(baseWindow - 1, ProfilingWindowUtils.unixMillisToWindow(baseTime.minusSeconds(1).toEpochMilli())); + } + + @Test + public void testKeepHistoryBefore() { + IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(100); + assertFalse(predicate.test(99)); + assertFalse(predicate.test(100)); + assertFalse(predicate.test(101)); + + assertFalse(predicate.test(40)); + assertTrue(predicate.test(39)); + assertTrue(predicate.test(0)); + assertTrue(predicate.test(-10)); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoderTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoderTest.java new file mode 100644 index 00000000..82dcb125 --- /dev/null +++ 
b/spark-common/src/test/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoderTest.java @@ -0,0 +1,54 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.window; + +import com.google.common.collect.ImmutableMap; +import org.junit.jupiter.api.Test; + +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ProtoTimeEncoderTest { + + @Test + public void testSimple() { + ProtoTimeEncoder encoder = new ProtoTimeEncoder(l -> l, IntStream.of(7, 1, 3, 5)); + assertArrayEquals(new int[]{1, 3, 5, 7}, encoder.getKeys()); + + assertArrayEquals(new double[]{0, 0, 0, 0}, encoder.encode(ImmutableMap.of())); + assertArrayEquals(new double[]{0, 100, 0, 0}, encoder.encode(ImmutableMap.of(3, longAdder(100)))); + assertArrayEquals(new double[]{0, 100, 200, 0}, encoder.encode(ImmutableMap.of(3, longAdder(100), 5, longAdder(200)))); + + RuntimeException ex = assertThrows(RuntimeException.class, () -> encoder.encode(ImmutableMap.of(9, longAdder(300)))); + assertTrue(ex.getMessage().startsWith("No index for key 9")); + } + + private static LongAdder longAdder(long l) { + LongAdder longAdder = new LongAdder(); + longAdder.add(l); + return longAdder; + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/tick/TickHookTest.java b/spark-common/src/test/java/me/lucko/spark/common/tick/TickHookTest.java new file mode 100644 index 00000000..f9202620 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/tick/TickHookTest.java @@ -0,0 +1,69 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.common.tick;
+
+import com.google.common.collect.ImmutableList;
+import org.junit.jupiter.api.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class TickHookTest {
+
+    @Test
+    public void testAbstractHook() {
+        AbstractTickHook hook = new AbstractTickHook() {
+            @Override
+            public void start() {
+
+            }
+
+            @Override
+            public void close() {
+
+            }
+        };
+
+        assertEquals(0, hook.getCurrentTick());
+
+        List<Integer> ticks = new ArrayList<>();
+        TickHook.Callback callback = ticks::add;
+
+        hook.addCallback(callback);
+
+        hook.onTick();
+        assertEquals(1, hook.getCurrentTick());
+        assertEquals(ImmutableList.of(0), ticks);
+
+        hook.onTick();
+        assertEquals(2, hook.getCurrentTick());
+        assertEquals(ImmutableList.of(0, 1), ticks);
+
+        hook.removeCallback(callback);
+
+        hook.onTick();
+        assertEquals(3, hook.getCurrentTick());
+        assertEquals(ImmutableList.of(0, 1), ticks);
+    }
+
+}
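One subtlety the assertions above capture: callbacks observe the pre-increment tick counter (0, 1, ...) while getCurrentTick() afterwards reads 1, 2, .... A minimal standalone sketch of that ordering (TickCounterSketch is hypothetical; AbstractTickHook's real implementation may differ):

import java.util.ArrayList;
import java.util.List;
import java.util.function.IntConsumer;

public class TickCounterSketch {
    private final List<IntConsumer> callbacks = new ArrayList<>();
    private int tick = 0;

    public void addCallback(IntConsumer callback) {
        this.callbacks.add(callback);
    }

    // Notify with the current value, then advance: callbacks see 0, 1, ...
    // while getCurrentTick() afterwards reports 1, 2, ...
    public void onTick() {
        for (IntConsumer callback : this.callbacks) {
            callback.accept(this.tick);
        }
        this.tick++;
    }

    public int getCurrentTick() {
        return this.tick;
    }
}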
diff --git a/spark-common/src/test/java/me/lucko/spark/common/tick/TickReporterTest.java b/spark-common/src/test/java/me/lucko/spark/common/tick/TickReporterTest.java
new file mode 100644
index 00000000..f7402c5c
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/tick/TickReporterTest.java
@@ -0,0 +1,94 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.tick;
+
+import com.google.common.collect.ImmutableList;
+import org.junit.jupiter.api.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class TickReporterTest {
+
+    @Test
+    public void testAbstractReporter() {
+        AbstractTickReporter reporter = new AbstractTickReporter() {
+            @Override
+            public void start() {
+
+            }
+
+            @Override
+            public void close() {
+
+            }
+        };
+
+        List<Double> durations = new ArrayList<>();
+        TickReporter.Callback callback = durations::add;
+
+        reporter.addCallback(callback);
+
+        reporter.onTick(1.0);
+        assertEquals(ImmutableList.of(1.0), durations);
+
+        reporter.onTick(2.0);
+        assertEquals(ImmutableList.of(1.0, 2.0), durations);
+
+        reporter.removeCallback(callback);
+
+        reporter.onTick(3.0);
+        assertEquals(ImmutableList.of(1.0, 2.0), durations);
+    }
+
+    @Test
+    public void testSimpleReporter() {
+        SimpleTickReporter reporter = new SimpleTickReporter() {
+            @Override
+            public void start() {
+
+            }
+        };
+
+        List<Double> durations = new ArrayList<>();
+        TickReporter.Callback callback = durations::add;
+
+        reporter.addCallback(callback);
+
+        reporter.onStart();
+        assertEquals(0, durations.size());
+
+        try {
+            Thread.sleep(10);
+        } catch (InterruptedException e) {
+            // ignore
+        }
+
+        reporter.onEnd();
+
+        assertEquals(1, durations.size());
+        assertTrue(durations.get(0) > 0);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/FormatUtilTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/FormatUtilTest.java
new file mode 100644
index 00000000..5b77fb52
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/util/FormatUtilTest.java
@@ -0,0 +1,67 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class FormatUtilTest { + + @Test + public void testPercent() { + assertEquals("50%", FormatUtil.percent(0.5, 1)); + assertEquals("100%", FormatUtil.percent(1, 1)); + assertEquals("0%", FormatUtil.percent(0, 1)); + + assertEquals("50%", FormatUtil.percent(50, 100)); + assertEquals("100%", FormatUtil.percent(100, 100)); + assertEquals("0%", FormatUtil.percent(0, 100)); + } + + @Test + public void testBytes() { + assertEquals("0 bytes", FormatUtil.formatBytes(0)); + assertEquals("1.0 bytes", FormatUtil.formatBytes(1)); + assertEquals("1.0 KB", FormatUtil.formatBytes(1024)); + assertEquals("1.0 MB", FormatUtil.formatBytes(1024 * 1024)); + assertEquals("1.0 GB", FormatUtil.formatBytes(1024 * 1024 * 1024)); + assertEquals("1.0 TB", FormatUtil.formatBytes(1024L * 1024 * 1024 * 1024)); + + assertEquals("2.5 KB", FormatUtil.formatBytes((long) (1024 * 2.5d))); + assertEquals("2.5 MB", FormatUtil.formatBytes((long) (1024 * 1024 * 2.5d))); + } + + @Test + public void testSeconds() { + assertEquals("0s", FormatUtil.formatSeconds(0)); + assertEquals("1s", FormatUtil.formatSeconds(1)); + assertEquals("59s", FormatUtil.formatSeconds(59)); + assertEquals("1m", FormatUtil.formatSeconds(60)); + assertEquals("1m 1s", FormatUtil.formatSeconds(61)); + assertEquals("1m 59s", FormatUtil.formatSeconds(119)); + assertEquals("2m", FormatUtil.formatSeconds(120)); + assertEquals("2m 1s", FormatUtil.formatSeconds(121)); + assertEquals("2m 59s", FormatUtil.formatSeconds(179)); + assertEquals("3m", FormatUtil.formatSeconds(180)); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/IndexedListBuilderTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/IndexedListBuilderTest.java new file mode 100644 index 00000000..bb3820e7 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/IndexedListBuilderTest.java @@ -0,0 +1,42 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.common.util;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.Arrays;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class IndexedListBuilderTest {
+
+    @Test
+    public void testIndexes() {
+        IndexedListBuilder<String> builder = new IndexedListBuilder<>();
+
+        assertEquals(0, builder.add("a"));
+        assertEquals(1, builder.add("b"));
+        assertEquals(2, builder.add("c"));
+
+        assertEquals(Arrays.asList("a", "b", "c"), builder.build());
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/JavaVersionTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/JavaVersionTest.java
new file mode 100644
index 00000000..470f4d07
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/util/JavaVersionTest.java
@@ -0,0 +1,41 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class JavaVersionTest {
+
+    @Test
+    public void testJavaVersion() {
+        assertEquals(7, JavaVersion.parseJavaVersion("1.7"));
+        assertEquals(8, JavaVersion.parseJavaVersion("1.8"));
+        assertEquals(9, JavaVersion.parseJavaVersion("9"));
+        assertEquals(11, JavaVersion.parseJavaVersion("11"));
+        assertEquals(17, JavaVersion.parseJavaVersion("17"));
+        assertEquals(9, JavaVersion.parseJavaVersion("9.0.1"));
+        assertEquals(11, JavaVersion.parseJavaVersion("11.0.1"));
+        assertEquals(17, JavaVersion.parseJavaVersion("17.0.1"));
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/MethodDisambiguatorTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/MethodDisambiguatorTest.java
new file mode 100644
index 00000000..ad2f1e1d
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/util/MethodDisambiguatorTest.java
@@ -0,0 +1,66 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */ + +package me.lucko.spark.common.util; + +import me.lucko.spark.common.util.MethodDisambiguator.MethodDescription; +import me.lucko.spark.common.util.classfinder.FallbackClassFinder; +import me.lucko.spark.test.TestClass; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +public class MethodDisambiguatorTest { + + private static final MethodDisambiguator DISAMBIGUATOR = new MethodDisambiguator(FallbackClassFinder.INSTANCE); + + @ParameterizedTest + @CsvSource({ + "25, test(Ljava/lang/String;)V", + "26, test(Ljava/lang/String;)V", + "27, test(Ljava/lang/String;)V", + "28, test(Ljava/lang/String;)V", + "31, test(I)V", + "32, test(I)V", + "33, test(I)V", + "34, test(I)V", + "37, test(Z)V", + "38, test(Z)V", + "39, test(Z)V", + "40, test(Z)V", + }) + public void testSuccessfulDisambiguate(int line, String expectedDesc) { + MethodDescription method = DISAMBIGUATOR.disambiguate(TestClass.class.getName(), "test", line).orElse(null); + assertNotNull(method); + assertEquals(expectedDesc, method.toString()); + } + + @ParameterizedTest + @ValueSource(ints = {24, 29, 100}) + public void testUnsuccessfulDisambiguate(int line) { + MethodDescription method = DISAMBIGUATOR.disambiguate(TestClass.class.getName(), "test", line).orElse(null); + assertNull(method); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/RollingAverageTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/RollingAverageTest.java new file mode 100644 index 00000000..a5b4a00d --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/RollingAverageTest.java @@ -0,0 +1,81 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; + +import java.math.BigDecimal; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class RollingAverageTest { + + @Test + public void testMean() { + RollingAverage ra = new RollingAverage(3); + ra.add(BigDecimal.valueOf(1)); + ra.add(BigDecimal.valueOf(2)); + ra.add(BigDecimal.valueOf(3)); + + assertEquals(2, ra.mean()); + ra.add(BigDecimal.valueOf(4)); + assertEquals(3, ra.mean()); + ra.add(BigDecimal.valueOf(5)); + assertEquals(4, ra.mean()); + ra.add(BigDecimal.valueOf(6)); + assertEquals(5, ra.mean()); + } + + @Test + public void testMax() { + RollingAverage ra = new RollingAverage(3); + ra.add(BigDecimal.valueOf(1)); + ra.add(BigDecimal.valueOf(2)); + ra.add(BigDecimal.valueOf(3)); + + assertEquals(3, ra.max()); + } + + @Test + public void testMin() { + RollingAverage ra = new RollingAverage(3); + ra.add(BigDecimal.valueOf(1)); + ra.add(BigDecimal.valueOf(2)); + ra.add(BigDecimal.valueOf(3)); + + assertEquals(1, ra.min()); + } + + @Test + public void testPercentile() { + RollingAverage ra = new RollingAverage(3); + ra.add(BigDecimal.valueOf(1)); + ra.add(BigDecimal.valueOf(2)); + ra.add(BigDecimal.valueOf(3)); + + assertEquals(1, ra.percentile(0)); + assertEquals(2, ra.percentile(0.25)); + assertEquals(2, ra.percentile(0.5)); + assertEquals(3, ra.percentile(0.75)); + assertEquals(3, ra.percentile(1)); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/TemporaryFilesTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/TemporaryFilesTest.java new file mode 100644 index 00000000..9e073818 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/TemporaryFilesTest.java @@ -0,0 +1,77 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.util; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class TemporaryFilesTest { + + @Test + public void testDelete(@TempDir Path tempDir) throws IOException { + Path dir = tempDir.resolve("test"); + TemporaryFiles temporaryFiles = new TemporaryFiles(dir); + + assertTrue(Files.exists(dir) && Files.isDirectory(dir)); + assertTrue(Files.exists(dir.resolve("about.txt"))); + assertEquals("# What is this directory?", Files.readAllLines(dir.resolve("about.txt")).get(0)); + + Path temporaryFile = temporaryFiles.create("test", ".txt"); + Files.write(temporaryFile, "Hello, world!".getBytes()); + + assertTrue(Files.exists(temporaryFile)); + temporaryFiles.deleteTemporaryFiles(); + assertFalse(Files.exists(temporaryFile)); + } + + @Test + public void testCleanupOnInit(@TempDir Path tempDir) throws IOException { + Path dir = tempDir.resolve("test"); + + Path nestedDirectory = dir.resolve("hello").resolve("world"); + Files.createDirectories(nestedDirectory); + + Path testFile = nestedDirectory.resolve("file.txt"); + Files.write(testFile, "Hello, world!".getBytes()); + assertTrue(Files.exists(testFile)); + + TemporaryFiles temporaryFiles = new TemporaryFiles(dir); + + assertFalse(Files.exists(testFile)); + } + + @Test + public void testSecondInit(@TempDir Path tempDir) throws IOException { + Path dir = tempDir.resolve("test"); + + TemporaryFiles temporaryFiles = new TemporaryFiles(dir); + TemporaryFiles temporaryFiles2 = new TemporaryFiles(dir); + } + +} diff --git a/spark-common/src/test/java/me/lucko/spark/common/util/ThreadFinderTest.java b/spark-common/src/test/java/me/lucko/spark/common/util/ThreadFinderTest.java new file mode 100644 index 00000000..bffbf278 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/common/util/ThreadFinderTest.java @@ -0,0 +1,50 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.common.util;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class ThreadFinderTest {
+
+    @Test
+    public void testFindThread() {
+        Thread thread = new Thread(() -> {
+            try {
+                Thread.sleep(100_000);
+            } catch (InterruptedException e) {
+                // ignore
+            }
+        }, "test-thread-1");
+        thread.start();
+
+        ThreadFinder threadFinder = new ThreadFinder();
+        List<Thread> threads = threadFinder.getThreads().collect(Collectors.toList());
+        assertTrue(threads.contains(thread));
+
+        thread.interrupt();
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/test/TestClass.java b/spark-common/src/test/java/me/lucko/spark/test/TestClass.java
new file mode 100644
index 00000000..bd7ff4af
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/test/TestClass.java
@@ -0,0 +1,41 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.test;
+
+public class TestClass {
+    public void test(String string) {
+        System.out.println("Hello, world!");
+        System.out.println("Hello, world!");
+        System.out.println("Hello, world!");
+    }
+
+    public void test(int integer) {
+        System.out.println("Hello, world!");
+        System.out.println("Hello, world!");
+        System.out.println("Hello, world!");
+    }
+
+    public void test(boolean bool) {
+        System.out.println("Hello, world!");
+        System.out.println("Hello, world!");
+        System.out.println("Hello, world!");
+    }
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java b/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java
new file mode 100644
index 00000000..17ee88ef
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java
@@ -0,0 +1,52 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */ + +package me.lucko.spark.test; + +public class TestClass2 implements Runnable { + + @Override + public void run() { + while (true) { + try { + test(); + } catch (InterruptedException e) { + return; + } + } + } + + public void test() throws InterruptedException { + for (int i = 0; i < 10; i++) { + testA(); + Thread.sleep(100); + testB(); + } + } + + public void testA() throws InterruptedException { + Thread.sleep(100); + } + + public void testB() throws InterruptedException { + Thread.sleep(100); + + } +} diff --git a/spark-common/src/test/java/me/lucko/spark/test/plugin/TestCommandSender.java b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestCommandSender.java new file mode 100644 index 00000000..294b0732 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestCommandSender.java @@ -0,0 +1,53 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.test.plugin; + +import me.lucko.spark.common.command.sender.CommandSender; +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.serializer.ansi.ANSIComponentSerializer; + +import java.util.UUID; + +public enum TestCommandSender implements CommandSender { + INSTANCE; + + private final UUID uniqueId = new UUID(0, 0); + + @Override + public String getName() { + return "Test"; + } + + @Override + public UUID getUniqueId() { + return this.uniqueId; + } + + @Override + public void sendMessage(Component message) { + System.out.println(ANSIComponentSerializer.ansi().serialize(message)); + } + + @Override + public boolean hasPermission(String permission) { + return true; + } +} diff --git a/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java new file mode 100644 index 00000000..866a2b66 --- /dev/null +++ b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java @@ -0,0 +1,138 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.test.plugin;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.classfinder.ClassFinder;
+import me.lucko.spark.common.util.classfinder.FallbackClassFinder;
+
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Stream;
+
+public class TestSparkPlugin implements SparkPlugin, AutoCloseable {
+
+    private static final Logger LOGGER = Logger.getLogger("spark-test");
+    private static final ScheduledExecutorService EXECUTOR_SERVICE = Executors.newScheduledThreadPool(16);
+
+    private final Path directory;
+    private final Map<String, String> props;
+
+    private final SparkPlatform platform;
+
+    public TestSparkPlugin(Path directory, Map<String, String> config) {
+        this.directory = directory;
+        this.props = new HashMap<>(config);
+        this.props.putIfAbsent("backgroundProfiler", "false");
+
+        this.props.forEach((k, v) -> System.setProperty("spark." + k, v));
+        this.platform = new SparkPlatform(this);
+        this.platform.enable();
+    }
+
+    public TestSparkPlugin(Path directory) {
+        this(directory, Collections.emptyMap());
+    }
+
+    public SparkPlatform platform() {
+        return this.platform;
+    }
+
+    @Override
+    public void close() {
+        this.platform.disable();
+        this.props.keySet().forEach((k) -> System.clearProperty("spark." + k));
+    }
+
+    @Override
+    public String getVersion() {
+        return "1.0-test";
+    }
+
+    @Override
+    public Path getPluginDirectory() {
+        return this.directory;
+    }
+
+    @Override
+    public String getCommandName() {
+        return "spark";
+    }
+
+    @Override
+    public Stream<? extends CommandSender> getCommandSenders() {
+        return Stream.of(TestCommandSender.INSTANCE);
+    }
+
+    @Override
+    public void executeAsync(Runnable task) {
+        EXECUTOR_SERVICE.execute(task);
+    }
+
+    @Override
+    public void log(Level level, String msg) {
+        LOGGER.log(level, msg);
+    }
+
+    @Override
+    public PlatformInfo getPlatformInfo() {
+        return new PlatformInfo() {
+            @Override
+            public Type getType() {
+                return Type.SERVER;
+            }
+
+            @Override
+            public String getName() {
+                return "Test";
+            }
+
+            @Override
+            public String getBrand() {
+                return "Test";
+            }
+
+            @Override
+            public String getVersion() {
+                return "v1.0-test";
+            }
+
+            @Override
+            public String getMinecraftVersion() {
+                return null;
+            }
+        };
+    }
+
+    @Override
+    public ClassFinder createClassFinder() {
+        return FallbackClassFinder.INSTANCE;
+    }
+}
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index 5ab88ee0..a9cef99e 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -1,8 +1,8 @@
 import net.fabricmc.loom.task.RemapJarTask
 
 plugins {
-    id 'fabric-loom' version '1.6-SNAPSHOT'
-    id 'com.github.johnrengelman.shadow' version '8.1.1'
+    id 'fabric-loom' version '1.7-SNAPSHOT'
+    id 'com.gradleup.shadow' version '8.3.0'
 }
 
 tasks.withType(JavaCompile) {
@@ -28,9 +28,9 @@ configurations {
 dependencies {
     // https://modmuss50.me/fabric.html
-    minecraft 'com.mojang:minecraft:1.20.5'
-    mappings 'net.fabricmc:yarn:1.20.5+build.1:v2'
-    modImplementation 'net.fabricmc:fabric-loader:0.15.10'
+    minecraft 'com.mojang:minecraft:1.21.1'
+    mappings 'net.fabricmc:yarn:1.21.1+build.3:v2'
+    modImplementation 'net.fabricmc:fabric-loader:0.15.11'
 
     Set<String> apiModules = [
"fabric-api-base", @@ -40,12 +40,12 @@ dependencies { // Add each module as a dependency apiModules.forEach { - modImplementation(fabricApi.module(it, '0.97.5+1.20.5')) + modImplementation(fabricApi.module(it, '0.102.1+1.21.1')) } include(modImplementation('me.lucko:fabric-permissions-api:0.3.1')) - modImplementation('eu.pb4:placeholder-api:2.4.0-pre.1+1.20.5') + modImplementation('eu.pb4:placeholder-api:2.4.1+1.21') shade project(':spark-common') } @@ -76,6 +76,7 @@ shadowJar { relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' // relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' @@ -86,6 +87,8 @@ shadowJar { exclude 'module-info.class' exclude 'META-INF/maven/**' exclude 'META-INF/proguard/**' + exclude '**/*.proto' + exclude '**/*.proto.bin' dependencies { exclude(dependency('org.ow2.asm::')) diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java index ef22d9c3..c79c2b04 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java @@ -21,16 +21,13 @@ package me.lucko.spark.fabric; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.util.ClassFinder; +import me.lucko.spark.common.util.classfinder.ClassFinder; import me.lucko.spark.fabric.smap.MixinUtils; import me.lucko.spark.fabric.smap.SourceMap; import me.lucko.spark.fabric.smap.SourceMapProvider; - import net.fabricmc.loader.api.FabricLoader; import net.fabricmc.loader.api.ModContainer; - import org.objectweb.asm.Type; import org.spongepowered.asm.mixin.FabricUtil; import org.spongepowered.asm.mixin.extensibility.IMixinConfig; @@ -44,14 +41,15 @@ import java.util.Map; public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource { - - private final ClassFinder classFinder = new ClassFinder(); - private final SourceMapProvider smapProvider = new SourceMapProvider(); - + private final ClassFinder classFinder; + private final SourceMapProvider smapProvider; private final Path modsDirectory; private final Map pathToModMap; - public FabricClassSourceLookup() { + public FabricClassSourceLookup(ClassFinder classFinder) { + this.classFinder = classFinder; + this.smapProvider = new SourceMapProvider(); + FabricLoader loader = FabricLoader.getInstance(); this.modsDirectory = loader.getGameDir().resolve("mods").toAbsolutePath().normalize(); this.pathToModMap = constructPathToModIdMap(loader.getAllMods()); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java index 7da89966..64230183 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.command.sender.AbstractCommandSender; import me.lucko.spark.fabric.plugin.FabricSparkPlugin; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; import 
net.minecraft.entity.player.PlayerEntity; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java deleted file mode 100644 index 91797bb4..00000000 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.fabric; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - -import me.lucko.spark.common.platform.MetadataProvider; - -import net.minecraft.resource.ResourcePackManager; -import net.minecraft.resource.ResourcePackProfile; -import net.minecraft.resource.ResourcePackSource; - -import java.util.LinkedHashMap; -import java.util.Map; - -public class FabricExtraMetadataProvider implements MetadataProvider { - - private final ResourcePackManager resourcePackManager; - - public FabricExtraMetadataProvider(ResourcePackManager resourcePackManager) { - this.resourcePackManager = resourcePackManager; - } - - @Override - public Map get() { - Map metadata = new LinkedHashMap<>(); - metadata.put("datapacks", datapackMetadata()); - return metadata; - } - - private JsonElement datapackMetadata() { - JsonObject datapacks = new JsonObject(); - for (ResourcePackProfile profile : this.resourcePackManager.getEnabledProfiles()) { - JsonObject obj = new JsonObject(); - obj.addProperty("name", profile.getDisplayName().getString()); - obj.addProperty("description", profile.getDescription().getString()); - obj.addProperty("source", resourcePackSource(profile.getSource())); - datapacks.add(profile.getId(), obj); - } - return datapacks; - } - - private static String resourcePackSource(ResourcePackSource source) { - if (source == ResourcePackSource.NONE) { - return "none"; - } else if (source == ResourcePackSource.BUILTIN) { - return "builtin"; - } else if (source == ResourcePackSource.WORLD) { - return "world"; - } else if (source == ResourcePackSource.SERVER) { - return "server"; - } else { - return "unknown"; - } - } -} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java index e298121e..57251e1f 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java @@ -21,7 +21,6 @@ package me.lucko.spark.fabric; import me.lucko.spark.common.platform.PlatformInfo; - import net.fabricmc.loader.api.FabricLoader; import java.util.Optional; @@ -43,6 +42,13 @@ public String getName() { return "Fabric"; } + @Override + public String getBrand() { + return FabricLoader.getInstance().getModContainer("fabric") + .map(container -> container.getMetadata().getName()) + 
.orElse("Fabric"); + } + @Override public String getVersion() { return getModVersion("fabricloader").orElse("unknown"); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java index fefe284d..42b6b373 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java @@ -21,9 +21,7 @@ package me.lucko.spark.fabric; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import net.minecraft.server.MinecraftServer; import net.minecraft.server.network.ServerPlayerEntity; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java index 325a324d..bc70228d 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java @@ -22,7 +22,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; - import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java index ad419f7f..23a518b2 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java @@ -21,10 +21,8 @@ package me.lucko.spark.fabric; import com.mojang.brigadier.CommandDispatcher; - import me.lucko.spark.fabric.plugin.FabricClientSparkPlugin; import me.lucko.spark.fabric.plugin.FabricServerSparkPlugin; - import net.fabricmc.api.ModInitializer; import net.fabricmc.fabric.api.command.v2.CommandRegistrationCallback; import net.fabricmc.fabric.api.event.lifecycle.v1.ServerLifecycleEvents; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickHook.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickHook.java index b76ad9bf..4f25f2b4 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickHook.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickHook.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; - import net.fabricmc.fabric.api.client.event.lifecycle.v1.ClientTickEvents; import net.fabricmc.fabric.api.event.lifecycle.v1.ServerTickEvents; import net.minecraft.client.MinecraftClient; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickReporter.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickReporter.java index b71ac848..6a6addde 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickReporter.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricTickReporter.java @@ -22,7 +22,6 @@ import me.lucko.spark.common.tick.SimpleTickReporter; import me.lucko.spark.common.tick.TickReporter; - import net.fabricmc.fabric.api.client.event.lifecycle.v1.ClientTickEvents; import net.fabricmc.fabric.api.event.lifecycle.v1.ServerTickEvents; import net.minecraft.client.MinecraftClient; diff --git 
a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
index 156db89c..514be5b6 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
@@ -20,9 +20,7 @@
 package me.lucko.spark.fabric;
 
-import it.unimi.dsi.fastutil.longs.LongIterator;
-import it.unimi.dsi.fastutil.longs.LongSet;
-
+import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
 import me.lucko.spark.common.platform.world.AbstractChunkInfo;
 import me.lucko.spark.common.platform.world.CountMap;
 import me.lucko.spark.common.platform.world.WorldInfoProvider;
@@ -30,39 +28,56 @@ import me.lucko.spark.fabric.mixin.ClientWorldAccessor;
 import me.lucko.spark.fabric.mixin.ServerEntityManagerAccessor;
 import me.lucko.spark.fabric.mixin.ServerWorldAccessor;
-
+import me.lucko.spark.fabric.mixin.WorldAccessor;
+import net.fabricmc.loader.api.FabricLoader;
 import net.minecraft.client.MinecraftClient;
-import net.minecraft.client.world.ClientEntityManager;
 import net.minecraft.client.world.ClientWorld;
 import net.minecraft.entity.Entity;
 import net.minecraft.entity.EntityType;
+import net.minecraft.resource.ResourcePackManager;
+import net.minecraft.resource.ResourcePackSource;
 import net.minecraft.server.MinecraftServer;
 import net.minecraft.server.world.ServerEntityManager;
 import net.minecraft.server.world.ServerWorld;
 import net.minecraft.util.math.ChunkPos;
+import net.minecraft.world.GameRules;
+import net.minecraft.world.entity.ClientEntityManager;
 import net.minecraft.world.entity.EntityIndex;
-import net.minecraft.world.entity.EntityTrackingSection;
-import net.minecraft.world.entity.SectionedEntityCache;
+import net.minecraft.world.entity.EntityLookup;
 
-import java.util.ArrayList;
+import java.lang.reflect.Method;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
-import java.util.stream.Stream;
+import java.util.stream.Collectors;
 
 public abstract class FabricWorldInfoProvider implements WorldInfoProvider {
 
-    protected List<FabricChunkInfo> getChunksFromCache(SectionedEntityCache<Entity> cache) {
-        LongSet loadedChunks = cache.getChunkPositions();
-        List<FabricChunkInfo> list = new ArrayList<>(loadedChunks.size());
-
-        for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) {
-            long chunkPos = iterator.nextLong();
-            Stream<EntityTrackingSection<Entity>> sections = cache.getTrackingSections(chunkPos);
+    protected abstract ResourcePackManager getResourcePackManager();
+
+    @Override
+    public Collection<DataPackInfo> pollDataPacks() {
+        return getResourcePackManager().getEnabledProfiles().stream()
+                .map(pack -> new DataPackInfo(
+                        pack.getId(),
+                        pack.getDescription().getString(),
+                        resourcePackSource(pack.getSource())
+                ))
+                .collect(Collectors.toList());
+    }
 
-            list.add(new FabricChunkInfo(chunkPos, sections));
+    private static String resourcePackSource(ResourcePackSource source) {
+        if (source == ResourcePackSource.NONE) {
+            return "none";
+        } else if (source == ResourcePackSource.BUILTIN) {
+            return "builtin";
+        } else if (source == ResourcePackSource.WORLD) {
+            return "world";
+        } else if (source == ResourcePackSource.SERVER) {
+            return "server";
+        } else {
+            return "unknown";
         }
-
-        return list;
     }
 
     public static final class Server extends FabricWorldInfoProvider {
@@ -79,10 +94,15 @@ public CountsResult pollCounts() {
             int chunks = 0;
 
             for (ServerWorld world : this.server.getWorlds()) {
-                ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
-                EntityIndex<Entity> entityIndex = ((ServerEntityManagerAccessor) entityManager).getIndex();
-                entities += entityIndex.size();
+                if (FabricLoader.getInstance().isModLoaded("moonrise")) {
+                    entities += MoonriseMethods.getEntityCount(((WorldAccessor) world).spark$getEntityLookup());
+                } else {
+                    ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
+                    EntityIndex<Entity> entityIndex = ((ServerEntityManagerAccessor) entityManager).getIndex();
+                    entities += entityIndex.size();
+                }
+
                 chunks += world.getChunkManager().getLoadedChunkCount();
             }
 
@@ -94,15 +114,46 @@ public ChunksResult<FabricChunkInfo> pollChunks() {
             ChunksResult<FabricChunkInfo> data = new ChunksResult<>();
 
             for (ServerWorld world : this.server.getWorlds()) {
-                ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
-                SectionedEntityCache<Entity> cache = ((ServerEntityManagerAccessor) entityManager).getCache();
+                Long2ObjectOpenHashMap<FabricChunkInfo> worldInfos = new Long2ObjectOpenHashMap<>();
 
-                List<FabricChunkInfo> list = getChunksFromCache(cache);
-                data.put(world.getRegistryKey().getValue().getPath(), list);
+                for (Entity entity : ((WorldAccessor) world).spark$getEntityLookup().iterate()) {
+                    FabricChunkInfo info = worldInfos.computeIfAbsent(
+                            entity.getChunkPos().toLong(), FabricChunkInfo::new);
+                    info.entityCounts.increment(entity.getType());
+                }
+
+                data.put(world.getRegistryKey().getValue().getPath(), List.copyOf(worldInfos.values()));
             }
 
             return data;
         }
+
+        @Override
+        public GameRulesResult pollGameRules() {
+            GameRulesResult data = new GameRulesResult();
+            Iterable<ServerWorld> worlds = this.server.getWorlds();
+
+            GameRules.accept(new GameRules.Visitor() {
+                @Override
+                public <T extends GameRules.Rule<T>> void visit(GameRules.Key<T> key, GameRules.Type<T> type) {
+                    String defaultValue = type.createRule().serialize();
+                    data.putDefault(key.getName(), defaultValue);
+
+                    for (ServerWorld world : worlds) {
+                        String worldName = world.getRegistryKey().getValue().getPath();
+
+                        String value = world.getGameRules().get(key).serialize();
+                        data.put(key.getName(), worldName, value);
+                    }
+                }
+            });
+
+            return data;
+        }
+
+        @Override
+        protected ResourcePackManager getResourcePackManager() {
+            return this.server.getDataPackManager();
+        }
     }
 
     public static final class Client extends FabricWorldInfoProvider {
@@ -119,10 +170,16 @@ public CountsResult pollCounts() {
                 return null;
             }
 
-            ClientEntityManager<Entity> entityManager = ((ClientWorldAccessor) world).getEntityManager();
-            EntityIndex<Entity> entityIndex = ((ClientEntityManagerAccessor) entityManager).getIndex();
+            int entities;
+
+            if (FabricLoader.getInstance().isModLoaded("moonrise")) {
+                entities = MoonriseMethods.getEntityCount(((WorldAccessor) world).spark$getEntityLookup());
+            } else {
+                ClientEntityManager<Entity> entityManager = ((ClientWorldAccessor) world).getEntityManager();
+                EntityIndex<Entity> entityIndex = ((ClientEntityManagerAccessor) entityManager).getIndex();
+                entities = entityIndex.size();
+            }
 
-            int entities = entityIndex.size();
             int chunks = world.getChunkManager().getLoadedChunkCount();
 
             return new CountsResult(-1, entities, -1, chunks);
@@ -130,37 +187,64 @@ public CountsResult pollCounts() {
 
         @Override
         public ChunksResult<FabricChunkInfo> pollChunks() {
+            ClientWorld world = this.client.world;
+            if (world == null) {
+                return null;
+            }
+
             ChunksResult<FabricChunkInfo> data = new ChunksResult<>();
 
+            Long2ObjectOpenHashMap<FabricChunkInfo> worldInfos = new Long2ObjectOpenHashMap<>();
+
+            for (Entity entity : ((WorldAccessor) world).spark$getEntityLookup().iterate()) {
+                FabricChunkInfo info = worldInfos.computeIfAbsent(entity.getChunkPos().toLong(), FabricChunkInfo::new);
+                info.entityCounts.increment(entity.getType());
+            }
+
+            data.put(world.getRegistryKey().getValue().getPath(), List.copyOf(worldInfos.values()));
+
+            return data;
+        }
+
+        @Override
+        public GameRulesResult pollGameRules() {
             ClientWorld world = this.client.world;
             if (world == null) {
                 return null;
             }
 
-            ClientEntityManager<Entity> entityManager = ((ClientWorldAccessor) world).getEntityManager();
-            SectionedEntityCache<Entity> cache = ((ClientEntityManagerAccessor) entityManager).getCache();
+            GameRulesResult data = new GameRulesResult();
 
-            List<FabricChunkInfo> list = getChunksFromCache(cache);
-            data.put(world.getRegistryKey().getValue().getPath(), list);
+            String worldName = world.getRegistryKey().getValue().getPath();
+            GameRules worldRules = world.getGameRules();
+
+            GameRules.accept(new GameRules.Visitor() {
+                @Override
+                public <T extends GameRules.Rule<T>> void visit(GameRules.Key<T> key, GameRules.Type<T> type) {
+                    String defaultValue = type.createRule().serialize();
+                    data.putDefault(key.getName(), defaultValue);
+
+                    String value = worldRules.get(key).serialize();
+                    data.put(key.getName(), worldName, value);
+                }
+            });
 
             return data;
         }
+
+        @Override
+        protected ResourcePackManager getResourcePackManager() {
+            return this.client.getResourcePackManager();
+        }
     }
 
     static final class FabricChunkInfo extends AbstractChunkInfo<EntityType<?>> {
         private final CountMap<EntityType<?>> entityCounts;
 
-        FabricChunkInfo(long chunkPos, Stream<EntityTrackingSection<Entity>> entities) {
+        FabricChunkInfo(long chunkPos) {
             super(ChunkPos.getPackedX(chunkPos), ChunkPos.getPackedZ(chunkPos));
 
             this.entityCounts = new CountMap.Simple<>(new HashMap<>());
-
-            entities.forEach(section -> {
-                if (section.getStatus().shouldTrack()) {
-                    section.stream().forEach(entity ->
-                            this.entityCounts.increment(entity.getType())
-                    );
-                }
-            });
         }
 
         @Override
@@ -174,5 +258,28 @@ public String entityTypeName(EntityType<?> type) {
         }
     }
 
+    private static final class MoonriseMethods {
+        private static Method getEntityCount;
+
+        private static Method getEntityCountMethod(EntityLookup<Entity> getter) {
+            if (getEntityCount == null) {
+                try {
+                    getEntityCount = getter.getClass().getMethod("getEntityCount");
+                } catch (final ReflectiveOperationException e) {
+                    throw new RuntimeException("Cannot find Moonrise getEntityCount method", e);
+                }
+            }
+            return getEntityCount;
+        }
+
+        private static int getEntityCount(EntityLookup<Entity> getter) {
+            try {
+                return (int) getEntityCountMethod(getter).invoke(getter);
+            } catch (final ReflectiveOperationException e) {
+                throw new RuntimeException("Failed to invoke Moonrise getEntityCount method", e);
+            }
+        }
+    }
+
 }
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
index 994c9a39..d2c266c7 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
@@ -20,11 +20,10 @@
 package me.lucko.spark.fabric.mixin;
 
-import net.minecraft.client.world.ClientEntityManager;
 import net.minecraft.entity.Entity;
+import net.minecraft.world.entity.ClientEntityManager;
 import net.minecraft.world.entity.EntityIndex;
 import net.minecraft.world.entity.SectionedEntityCache;
-
 import org.spongepowered.asm.mixin.Mixin;
 import org.spongepowered.asm.mixin.gen.Accessor;
 
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java
index 01562efe..3940101a 100644
---
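Aside: the MoonriseMethods helper above is a general pattern for calling into an optional mod without a compile-time dependency: resolve the target method reflectively once, cache the Method handle, and fail fast with a descriptive error. A minimal standalone sketch of the same pattern, assuming a hypothetical getEntityCount target (the OptionalEntityCounter name is illustrative, not part of this change):

import java.lang.reflect.Method;

// Sketch: lazily-cached reflective access to an optional dependency.
final class OptionalEntityCounter {
    private static Method cached; // resolved on first use, then reused

    static int count(Object entityLookup) {
        try {
            if (cached == null) {
                cached = entityLookup.getClass().getMethod("getEntityCount");
            }
            return (int) cached.invoke(entityLookup);
        } catch (ReflectiveOperationException e) {
            throw new RuntimeException("getEntityCount unavailable on " + entityLookup.getClass(), e);
        }
    }
}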
a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java @@ -20,10 +20,9 @@ package me.lucko.spark.fabric.mixin; -import net.minecraft.client.world.ClientEntityManager; import net.minecraft.client.world.ClientWorld; import net.minecraft.entity.Entity; - +import net.minecraft.world.entity.ClientEntityManager; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java index 7a4fb785..dfe9494d 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java @@ -21,7 +21,6 @@ package me.lucko.spark.fabric.mixin; import net.minecraft.client.MinecraftClient; - import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java index 2c67502c..8acc76d7 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java @@ -24,7 +24,6 @@ import net.minecraft.server.world.ServerEntityManager; import net.minecraft.world.entity.EntityIndex; import net.minecraft.world.entity.SectionedEntityCache; - import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java index cf2e7e85..0254e750 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java @@ -23,7 +23,6 @@ import net.minecraft.entity.Entity; import net.minecraft.server.world.ServerEntityManager; import net.minecraft.server.world.ServerWorld; - import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/WorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/WorldAccessor.java new file mode 100644 index 00000000..e50dc20a --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/WorldAccessor.java @@ -0,0 +1,35 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.entity.Entity; +import net.minecraft.world.World; +import net.minecraft.world.entity.EntityLookup; +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Invoker; + +@Mixin(World.class) +public interface WorldAccessor { + + @Invoker(value = "getEntityLookup") + EntityLookup spark$getEntityLookup(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java index dc4321da..4e627ac0 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java @@ -24,16 +24,13 @@ import eu.pb4.placeholders.api.PlaceholderHandler; import eu.pb4.placeholders.api.PlaceholderResult; import eu.pb4.placeholders.api.Placeholders; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.util.SparkPlaceholder; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; import net.minecraft.registry.DynamicRegistryManager; import net.minecraft.text.Text; import net.minecraft.util.Identifier; - import org.jetbrains.annotations.Nullable; public enum SparkFabricPlaceholderApi { @@ -42,7 +39,7 @@ public enum SparkFabricPlaceholderApi { public static void register(SparkPlatform platform) { for (SparkPlaceholder placeholder : SparkPlaceholder.values()) { Placeholders.register( - new Identifier("spark", placeholder.getName()), + Identifier.of("spark", placeholder.getName()), new Handler(platform, placeholder) ); } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java index faf4eefb..d80e0260 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java @@ -27,22 +27,18 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; - -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; -import me.lucko.spark.fabric.FabricExtraMetadataProvider; import me.lucko.spark.fabric.FabricPlatformInfo; import me.lucko.spark.fabric.FabricSparkMod; import me.lucko.spark.fabric.FabricTickHook; import me.lucko.spark.fabric.FabricTickReporter; import me.lucko.spark.fabric.FabricWorldInfoProvider; import me.lucko.spark.fabric.mixin.MinecraftClientAccessor; - import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback; import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource; import net.fabricmc.fabric.api.client.event.lifecycle.v1.ClientLifecycleEvents; @@ -139,11 +135,6 @@ public TickReporter createTickReporter() { return new FabricTickReporter.Client(); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new 
FabricExtraMetadataProvider(this.minecraft.getResourcePackManager()); - } - @Override public WorldInfoProvider createWorldInfoProvider() { return new FabricWorldInfoProvider.Client(this.minecraft); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java index 1606d57e..04bbdf05 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java @@ -27,10 +27,8 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; - import me.lucko.fabric.api.permissions.v0.Permissions; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -38,7 +36,6 @@ import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; -import me.lucko.spark.fabric.FabricExtraMetadataProvider; import me.lucko.spark.fabric.FabricPlatformInfo; import me.lucko.spark.fabric.FabricPlayerPingProvider; import me.lucko.spark.fabric.FabricServerConfigProvider; @@ -47,7 +44,6 @@ import me.lucko.spark.fabric.FabricTickReporter; import me.lucko.spark.fabric.FabricWorldInfoProvider; import me.lucko.spark.fabric.placeholder.SparkFabricPlaceholderApi; - import net.fabricmc.loader.api.FabricLoader; import net.minecraft.entity.player.PlayerEntity; import net.minecraft.server.MinecraftServer; @@ -171,11 +167,6 @@ public ServerConfigProvider createServerConfigProvider() { return new FabricServerConfigProvider(); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new FabricExtraMetadataProvider(this.server.getDataPackManager()); - } - @Override public WorldInfoProvider createWorldInfoProvider() { return new FabricWorldInfoProvider.Server(this.server); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java index 8b7e1381..cfc8c950 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java @@ -21,7 +21,6 @@ package me.lucko.spark.fabric.plugin; import me.lucko.spark.fabric.smap.SourceDebugCache; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.objectweb.asm.tree.ClassNode; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java index 9a03b4e6..8f0c14db 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java @@ -30,7 +30,6 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; import com.mojang.brigadier.tree.LiteralCommandNode; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import 
me.lucko.spark.common.command.sender.CommandSender; @@ -39,11 +38,9 @@ import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.fabric.FabricClassSourceLookup; import me.lucko.spark.fabric.FabricSparkMod; - import net.fabricmc.loader.api.FabricLoader; import net.fabricmc.loader.api.metadata.Person; import net.minecraft.server.command.CommandOutput; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -112,7 +109,7 @@ public void log(Level level, String msg) { @Override public ClassSourceLookup createClassSourceLookup() { - return new FabricClassSourceLookup(); + return new FabricClassSourceLookup(createClassFinder()); } @Override @@ -123,7 +120,8 @@ public Collection getKnownSources() { mod -> mod.getMetadata().getVersion().getFriendlyString(), mod -> mod.getMetadata().getAuthors().stream() .map(Person::getName) - .collect(Collectors.joining(", ")) + .collect(Collectors.joining(", ")), + mod -> mod.getMetadata().getDescription() ); } diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json index 27676a64..a8e280c2 100644 --- a/spark-fabric/src/main/resources/spark.mixins.json +++ b/spark-fabric/src/main/resources/spark.mixins.json @@ -8,7 +8,8 @@ ], "mixins": [ "ServerEntityManagerAccessor", - "ServerWorldAccessor" + "ServerWorldAccessor", + "WorldAccessor" ], "plugin": "me.lucko.spark.fabric.plugin.FabricSparkMixinPlugin" } \ No newline at end of file diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle index 5f353225..1d627b11 100644 --- a/spark-forge/build.gradle +++ b/spark-forge/build.gradle @@ -1,15 +1,15 @@ plugins { - id 'com.github.johnrengelman.shadow' version '8.1.1' - id 'net.minecraftforge.gradle' version '[6.0,6.2)' + id 'com.gradleup.shadow' version '8.3.0' + id 'net.minecraftforge.gradle' version '[6.0.24,6.2)' } tasks.withType(JavaCompile) { - // override, compile targeting J17 - options.release = 17 + // override, compile targeting J21 + options.release = 21 } minecraft { - mappings channel: 'official', version: '1.20.4' + mappings channel: 'official', version: '1.21.1' accessTransformer = file('src/main/resources/META-INF/accesstransformer.cfg') } @@ -19,7 +19,7 @@ configurations { } dependencies { - minecraft 'net.minecraftforge:forge:1.20.4-49.0.3' + minecraft 'net.minecraftforge:forge:1.21.1-52.0.2' shade project(':spark-common') } @@ -43,6 +43,7 @@ shadowJar { relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' @@ -53,6 +54,8 @@ shadowJar { exclude 'module-info.class' exclude 'META-INF/maven/**' exclude 'META-INF/proguard/**' + exclude '**/*.proto' + exclude '**/*.proto.bin' } artifacts { diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java index 82d66ca7..1f7d90d1 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java @@ -20,9 +20,8 @@ package me.lucko.spark.forge; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import cpw.mods.modlauncher.TransformingClassLoader; +import 
me.lucko.spark.common.sampler.source.ClassSourceLookup; public class ForgeClassSourceLookup implements ClassSourceLookup { diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java index 93536688..0e65657b 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java @@ -22,10 +22,10 @@ import me.lucko.spark.common.command.sender.AbstractCommandSender; import me.lucko.spark.forge.plugin.ForgeSparkPlugin; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; import net.minecraft.commands.CommandSource; +import net.minecraft.core.RegistryAccess; import net.minecraft.network.chat.Component.Serializer; import net.minecraft.network.chat.MutableComponent; import net.minecraft.server.MinecraftServer; @@ -66,7 +66,7 @@ public UUID getUniqueId() { @Override public void sendMessage(Component message) { - MutableComponent component = Serializer.fromJson(GsonComponentSerializer.gson().serializeToTree(message)); + MutableComponent component = Serializer.fromJson(GsonComponentSerializer.gson().serializeToTree(message), RegistryAccess.EMPTY); Objects.requireNonNull(component, "component"); super.delegate.sendSystemMessage(component); } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java deleted file mode 100644 index cac2771b..00000000 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package me.lucko.spark.forge; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - -import me.lucko.spark.common.platform.MetadataProvider; - -import net.minecraft.server.packs.repository.Pack; -import net.minecraft.server.packs.repository.PackRepository; -import net.minecraft.server.packs.repository.PackSource; - -import java.util.LinkedHashMap; -import java.util.Map; - -public class ForgeExtraMetadataProvider implements MetadataProvider { - - private final PackRepository resourcePackManager; - - public ForgeExtraMetadataProvider(PackRepository resourcePackManager) { - this.resourcePackManager = resourcePackManager; - } - - @Override - public Map get() { - Map metadata = new LinkedHashMap<>(); - metadata.put("datapacks", datapackMetadata()); - return metadata; - } - - private JsonElement datapackMetadata() { - JsonObject datapacks = new JsonObject(); - for (Pack profile : this.resourcePackManager.getSelectedPacks()) { - JsonObject obj = new JsonObject(); - obj.addProperty("name", profile.getTitle().getString()); - obj.addProperty("description", profile.getDescription().getString()); - obj.addProperty("source", resourcePackSource(profile.getPackSource())); - datapacks.add(profile.getId(), obj); - } - return datapacks; - } - - private static String resourcePackSource(PackSource source) { - if (source == PackSource.DEFAULT) { - return "none"; - } else if (source == PackSource.BUILT_IN) { - return "builtin"; - } else if (source == PackSource.WORLD) { - return "world"; - } else if (source == PackSource.SERVER) { - return "server"; - } else { - return "unknown"; - } - } -} diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java index 97b3a86d..ede1996a 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java @@ -21,7 +21,9 @@ package me.lucko.spark.forge; import me.lucko.spark.common.platform.PlatformInfo; - +import net.minecraftforge.fml.ModContainer; +import net.minecraftforge.fml.ModList; +import net.minecraftforge.forgespi.language.IModInfo; import net.minecraftforge.versions.forge.ForgeVersion; import net.minecraftforge.versions.mcp.MCPVersion; @@ -42,6 +44,14 @@ public String getName() { return "Forge"; } + @Override + public String getBrand() { + return ModList.get().getModContainerById(ForgeVersion.MOD_ID) + .map(ModContainer::getModInfo) + .map(IModInfo::getDisplayName) + .orElse("Forge"); + } + @Override public String getVersion() { return ForgeVersion.getVersion(); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java index bb2cfc64..8b96b652 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java @@ -21,9 +21,7 @@ package me.lucko.spark.forge; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import net.minecraft.server.MinecraftServer; import net.minecraft.server.level.ServerPlayer; diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java index 6feba526..c05e7d55 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java +++ 
b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java
@@ -22,7 +22,6 @@
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-
 import me.lucko.spark.common.platform.serverconfig.ConfigParser;
 import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
 import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickHook.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickHook.java
index 36e70b05..84de9709 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickHook.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickHook.java
@@ -22,7 +22,6 @@
 
 import me.lucko.spark.common.tick.AbstractTickHook;
 import me.lucko.spark.common.tick.TickHook;
-
 import net.minecraftforge.common.MinecraftForge;
 import net.minecraftforge.event.TickEvent;
 import net.minecraftforge.eventbus.api.SubscribeEvent;
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickReporter.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickReporter.java
index c33443b2..06d0f3ed 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickReporter.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickReporter.java
@@ -22,7 +22,6 @@
 
 import me.lucko.spark.common.tick.SimpleTickReporter;
 import me.lucko.spark.common.tick.TickReporter;
-
 import net.minecraftforge.common.MinecraftForge;
 import net.minecraftforge.event.TickEvent;
 import net.minecraftforge.eventbus.api.SubscribeEvent;
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
index 4750c089..b31b825e 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
@@ -20,45 +20,56 @@
 package me.lucko.spark.forge;
 
-import it.unimi.dsi.fastutil.longs.LongIterator;
-import it.unimi.dsi.fastutil.longs.LongSet;
-
+import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
 import me.lucko.spark.common.platform.world.AbstractChunkInfo;
 import me.lucko.spark.common.platform.world.CountMap;
 import me.lucko.spark.common.platform.world.WorldInfoProvider;
-
 import net.minecraft.client.Minecraft;
 import net.minecraft.client.multiplayer.ClientLevel;
 import net.minecraft.server.MinecraftServer;
 import net.minecraft.server.level.ServerLevel;
+import net.minecraft.server.packs.repository.PackRepository;
+import net.minecraft.server.packs.repository.PackSource;
 import net.minecraft.world.entity.Entity;
 import net.minecraft.world.entity.EntityType;
 import net.minecraft.world.level.ChunkPos;
+import net.minecraft.world.level.GameRules;
 import net.minecraft.world.level.entity.EntityLookup;
-import net.minecraft.world.level.entity.EntitySection;
-import net.minecraft.world.level.entity.EntitySectionStorage;
 import net.minecraft.world.level.entity.PersistentEntitySectionManager;
 import net.minecraft.world.level.entity.TransientEntitySectionManager;
 
-import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
-import java.util.stream.Stream;
+import java.util.stream.Collectors;
 
 public abstract class ForgeWorldInfoProvider implements WorldInfoProvider {
 
-    protected List<ForgeChunkInfo> getChunksFromCache(EntitySectionStorage<Entity> cache) {
-        LongSet loadedChunks = cache.getAllChunksWithExistingSections();
-        List<ForgeChunkInfo> list = new ArrayList<>(loadedChunks.size());
-
-        for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) {
-            long chunkPos = iterator.nextLong();
-            Stream<EntitySection<Entity>> sections = cache.getExistingSectionsInChunk(chunkPos);
+    protected abstract PackRepository getPackRepository();
+
+    @Override
+    public Collection<DataPackInfo> pollDataPacks() {
+        return getPackRepository().getSelectedPacks().stream()
+                .map(pack -> new DataPackInfo(
+                        pack.getId(),
+                        pack.getDescription().getString(),
+                        resourcePackSource(pack.getPackSource())
+                ))
+                .collect(Collectors.toList());
+    }
 
-            list.add(new ForgeChunkInfo(chunkPos, sections));
+    private static String resourcePackSource(PackSource source) {
+        if (source == PackSource.DEFAULT) {
+            return "none";
+        } else if (source == PackSource.BUILT_IN) {
+            return "builtin";
+        } else if (source == PackSource.WORLD) {
+            return "world";
+        } else if (source == PackSource.SERVER) {
+            return "server";
+        } else {
+            return "unknown";
         }
-
-        return list;
     }
 
     public static final class Server extends ForgeWorldInfoProvider {
@@ -90,15 +101,47 @@ public ChunksResult<ForgeChunkInfo> pollChunks() {
             ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
 
             for (ServerLevel level : this.server.getAllLevels()) {
-                PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
-                EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+                Long2ObjectOpenHashMap<ForgeChunkInfo> levelInfos = new Long2ObjectOpenHashMap<>();
 
-                List<ForgeChunkInfo> list = getChunksFromCache(cache);
-                data.put(level.dimension().location().getPath(), list);
+                for (Entity entity : level.getEntities().getAll()) {
+                    ForgeChunkInfo info = levelInfos.computeIfAbsent(
+                            entity.chunkPosition().toLong(), ForgeChunkInfo::new);
+                    info.entityCounts.increment(entity.getType());
+                }
+
+                data.put(level.dimension().location().getPath(), List.copyOf(levelInfos.values()));
             }
 
             return data;
         }
+
+        @Override
+        public GameRulesResult pollGameRules() {
+            GameRulesResult data = new GameRulesResult();
+            Iterable<ServerLevel> levels = this.server.getAllLevels();
+
+            GameRules.visitGameRuleTypes(new GameRules.GameRuleTypeVisitor() {
+                @Override
+                public <T extends GameRules.Rule<T>> void visit(GameRules.Key<T> key, GameRules.Type<T> type) {
+                    String defaultValue = type.createRule().serialize();
+                    data.putDefault(key.getId(), defaultValue);
+
+                    for (ServerLevel level : levels) {
+                        String levelName = level.dimension().location().getPath();
+
+                        String value = level.getGameRules().getRule(key).serialize();
+                        data.put(key.getId(), levelName, value);
+                    }
+                }
+            });
+
+            return data;
+        }
+
+        @Override
+        protected PackRepository getPackRepository() {
+            return this.server.getPackRepository();
+        }
     }
 
     public static final class Client extends ForgeWorldInfoProvider {
@@ -126,37 +169,64 @@ public CountsResult pollCounts() {
 
         @Override
         public ChunksResult<ForgeChunkInfo> pollChunks() {
+            ClientLevel level = this.client.level;
+            if (level == null) {
+                return null;
+            }
+
             ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
 
+            Long2ObjectOpenHashMap<ForgeChunkInfo> levelInfos = new Long2ObjectOpenHashMap<>();
+
+            for (Entity entity : level.getEntities().getAll()) {
+                ForgeChunkInfo info = levelInfos.computeIfAbsent(entity.chunkPosition().toLong(), ForgeChunkInfo::new);
+                info.entityCounts.increment(entity.getType());
+            }
+
+            data.put(level.dimension().location().getPath(), List.copyOf(levelInfos.values()));
+
+            return data;
+        }
+
+        @Override
+        public GameRulesResult pollGameRules() {
             ClientLevel level = this.client.level;
             if (level == null) {
                 return null;
             }
 
-            TransientEntitySectionManager<Entity> entityManager = level.entityStorage;
-            EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+            GameRulesResult data = new GameRulesResult();
+
+            String levelName = level.dimension().location().getPath();
+            GameRules levelRules = level.getGameRules();
+
+            GameRules.visitGameRuleTypes(new GameRules.GameRuleTypeVisitor() {
+                @Override
+                public <T extends GameRules.Rule<T>> void visit(GameRules.Key<T> key, GameRules.Type<T> type) {
+                    String defaultValue = type.createRule().serialize();
+                    data.putDefault(key.getId(), defaultValue);
 
-            List<ForgeChunkInfo> list = getChunksFromCache(cache);
-            data.put(level.dimension().location().getPath(), list);
+                    String value = levelRules.getRule(key).serialize();
+                    data.put(key.getId(), levelName, value);
+                }
+            });
 
             return data;
         }
+
+        @Override
+        protected PackRepository getPackRepository() {
+            return this.client.getResourcePackRepository();
+        }
     }
 
-    static final class ForgeChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+    public static final class ForgeChunkInfo extends AbstractChunkInfo<EntityType<?>> {
         private final CountMap<EntityType<?>> entityCounts;
 
-        ForgeChunkInfo(long chunkPos, Stream<EntitySection<Entity>> entities) {
+        ForgeChunkInfo(long chunkPos) {
             super(ChunkPos.getX(chunkPos), ChunkPos.getZ(chunkPos));
 
             this.entityCounts = new CountMap.Simple<>(new HashMap<>());
-
-            entities.forEach(section -> {
-                if (section.getStatus().isAccessible()) {
-                    section.getEntities().forEach(entity ->
-                            this.entityCounts.increment(entity.getType())
-                    );
-                }
-            });
         }
 
         @Override
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
index a8c7c92e..86915a41 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
@@ -26,21 +26,17 @@
 import com.mojang.brigadier.suggestion.SuggestionProvider;
 import com.mojang.brigadier.suggestion.Suggestions;
 import com.mojang.brigadier.suggestion.SuggestionsBuilder;
-
-import me.lucko.spark.common.platform.MetadataProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.forge.ForgeCommandSender;
-import me.lucko.spark.forge.ForgeExtraMetadataProvider;
 import me.lucko.spark.forge.ForgePlatformInfo;
 import me.lucko.spark.forge.ForgeSparkMod;
 import me.lucko.spark.forge.ForgeTickHook;
 import me.lucko.spark.forge.ForgeTickReporter;
 import me.lucko.spark.forge.ForgeWorldInfoProvider;
-
 import net.minecraft.client.Minecraft;
 import net.minecraft.commands.CommandSource;
 import net.minecraft.commands.CommandSourceStack;
@@ -138,11 +134,6 @@ public WorldInfoProvider createWorldInfoProvider() {
         return new ForgeWorldInfoProvider.Client(this.minecraft);
     }
 
-    @Override
-    public MetadataProvider createExtraMetadataProvider() {
-        return new ForgeExtraMetadataProvider(this.minecraft.getResourcePackRepository());
-    }
-
     @Override
     public PlatformInfo getPlatformInfo() {
         return new ForgePlatformInfo(PlatformInfo.Type.CLIENT);
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
index 87370579..4af45d92 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
@@ -28,9 +28,7 @@
 import com.mojang.brigadier.suggestion.SuggestionProvider;
 import
com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; - import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -38,7 +36,6 @@ import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; -import me.lucko.spark.forge.ForgeExtraMetadataProvider; import me.lucko.spark.forge.ForgePlatformInfo; import me.lucko.spark.forge.ForgePlayerPingProvider; import me.lucko.spark.forge.ForgeServerConfigProvider; @@ -46,7 +43,6 @@ import me.lucko.spark.forge.ForgeTickHook; import me.lucko.spark.forge.ForgeTickReporter; import me.lucko.spark.forge.ForgeWorldInfoProvider; - import net.minecraft.commands.CommandSource; import net.minecraft.commands.CommandSourceStack; import net.minecraft.server.MinecraftServer; @@ -247,11 +243,6 @@ public ServerConfigProvider createServerConfigProvider() { return new ForgeServerConfigProvider(); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new ForgeExtraMetadataProvider(this.server.getPackRepository()); - } - @Override public WorldInfoProvider createWorldInfoProvider() { return new ForgeWorldInfoProvider.Server(this.server); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java index 56061b9c..5c0761dc 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java @@ -30,7 +30,6 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; import com.mojang.brigadier.tree.LiteralCommandNode; - import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; @@ -39,11 +38,9 @@ import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.forge.ForgeClassSourceLookup; import me.lucko.spark.forge.ForgeSparkMod; - import net.minecraft.commands.CommandSource; import net.minecraftforge.fml.ModList; import net.minecraftforge.forgespi.language.IModInfo; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -120,7 +117,8 @@ public Collection getKnownSources() { ModList.get().getMods(), IModInfo::getModId, mod -> mod.getVersion().toString(), - mod -> null // ? + mod -> null, // ? 
+ IModInfo::getDescription ); } diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg index 2699a0eb..37722747 100644 --- a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg +++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg @@ -5,3 +5,4 @@ public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage public net.minecraft.world.level.entity.TransientEntitySectionManager f_157637_ # entityStorage public net.minecraft.client.Minecraft f_91018_ # gameThread +public net.minecraft.client.multiplayer.ClientLevel m_142646_()Lnet/minecraft/world/level/entity/LevelEntityGetter; # getEntities diff --git a/spark-geyser/build.gradle b/spark-geyser/build.gradle index 7c14cff9..77329941 100644 --- a/spark-geyser/build.gradle +++ b/spark-geyser/build.gradle @@ -5,9 +5,11 @@ plugins { dependencies { implementation project(':spark-common') implementation 'net.kyori:adventure-text-serializer-legacy:4.14.0' - compileOnly 'org.geysermc.geyser:api:2.2.3-SNAPSHOT' - // TODO Remove - compileOnly 'org.geysermc.geyser:core:2.2.3-SNAPSHOT' + compileOnly 'org.geysermc.geyser:api:2.4.2-SNAPSHOT' + compileOnly 'com.google.guava:guava:29.0-jre' + compileOnly('org.geysermc.geyser:core:2.4.2-SNAPSHOT') { + transitive(false) + } } repositories { diff --git a/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserCommandSender.java b/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserCommandSender.java index d8d4c87d..230b5a72 100644 --- a/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserCommandSender.java +++ b/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserCommandSender.java @@ -26,7 +26,6 @@ import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; import org.geysermc.geyser.api.command.CommandSource; -import org.geysermc.geyser.session.GeyserSession; import java.util.UUID; @@ -43,10 +42,7 @@ public String getName() { @Override public UUID getUniqueId() { - if (this.delegate instanceof GeyserSession) { - return ((GeyserSession) this.delegate).javaUuid(); - } - return null; + return this.delegate.playerUuid(); } @Override diff --git a/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserPlatformInfo.java b/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserPlatformInfo.java index 62703439..d9d2c477 100644 --- a/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserPlatformInfo.java +++ b/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserPlatformInfo.java @@ -22,15 +22,14 @@ import me.lucko.spark.common.platform.PlatformInfo; -import org.geysermc.geyser.GeyserImpl; +import org.geysermc.api.util.ApiVersion; import org.geysermc.geyser.api.GeyserApi; -import org.geysermc.geyser.network.GameProtocol; public class GeyserPlatformInfo implements PlatformInfo { - private final GeyserApi server; + private final GeyserApi geyserApi; - public GeyserPlatformInfo(GeyserApi server) { - this.server = server; + public GeyserPlatformInfo(GeyserApi geyserApi) { + this.geyserApi = geyserApi; } @Override @@ -43,13 +42,19 @@ public String getName() { return "Geyser"; } + @Override + public String getBrand() { + return "Geyser"; + } + @Override public String getVersion() { - return GeyserImpl.VERSION; // TODO Move to API + ApiVersion version = geyserApi.geyserApiVersion(); + return version.human() + "." + version.major() + "." 
+ version.minor(); } @Override public String getMinecraftVersion() { - return GameProtocol.getJavaMinecraftVersion(); + return geyserApi.supportedJavaVersion().versionString(); } } diff --git a/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserPlayerPingProvider.java b/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserPlayerPingProvider.java index 8f8b141c..b5e0109a 100644 --- a/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserPlayerPingProvider.java +++ b/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserPlayerPingProvider.java @@ -24,11 +24,8 @@ import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import org.cloudburstmc.netty.channel.raknet.RakChildChannel; -import org.cloudburstmc.netty.handler.codec.raknet.common.RakSessionCodec; import org.geysermc.geyser.api.GeyserApi; import org.geysermc.geyser.api.connection.GeyserConnection; -import org.geysermc.geyser.session.GeyserSession; import java.util.Map; @@ -43,9 +40,7 @@ public GeyserPlayerPingProvider(GeyserApi server) { public Map poll() { ImmutableMap.Builder builder = ImmutableMap.builder(); for (GeyserConnection player : this.server.onlineConnections()) { - if (player.isConsole()) continue; - RakSessionCodec rakSessionCodec = ((RakChildChannel) ((GeyserSession) player).getUpstream().getSession().getPeer().getChannel()).rakPipeline().get(RakSessionCodec.class); - builder.put(player.name(), (int) rakSessionCodec.getPing()); + builder.put(player.name(), player.ping()); } return builder.build(); } diff --git a/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserSparkExtension.java b/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserSparkExtension.java index 84e87e61..8a01170b 100644 --- a/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserSparkExtension.java +++ b/spark-geyser/src/main/java/me/lucko/spark/geyser/GeyserSparkExtension.java @@ -26,6 +26,7 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import org.geysermc.event.subscribe.Subscribe; import org.geysermc.geyser.GeyserImpl; import org.geysermc.geyser.api.command.Command; @@ -40,6 +41,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.logging.Level; import java.util.stream.Stream; @@ -48,21 +50,22 @@ public class GeyserSparkExtension implements SparkPlugin, Extension { private SparkPlatform platform; @Subscribe - public void onPreInitialize(GeyserPreInitializeEvent event) { + public void onPreInitialize(GeyserPreInitializeEvent ignored) { if (this.geyserApi().platformType() != PlatformType.STANDALONE) { - this.logger().severe("spark is only supported on standalone Geyser instances! If you wish to use it on other platforms please download the spark version for that platform."); + this.logger().severe("spark is only supported on Geyser-Standalone instances! 
If you wish to use it on other platforms, use the spark version for that platform."); this.disable(); + return; } this.platform = new SparkPlatform(this); } @Subscribe - public void onPostInitialize(GeyserPostInitializeEvent event) { + public void onPostInitialize(GeyserPostInitializeEvent ignored) { this.platform.enable(); } @Subscribe - public void onShutdown(GeyserShutdownEvent event) { + public void onShutdown(GeyserShutdownEvent ignored) { if (this.platform != null) { this.platform.disable(); } @@ -104,7 +107,7 @@ public String getCommandName() { public Stream<GeyserCommandSender> getCommandSenders() { return Stream.concat( this.geyserApi().onlineConnections().stream(), - Stream.of(((GeyserImpl) this.geyserApi()).getLogger()) + Stream.of(geyserApi().consoleCommandSource()) ).map(GeyserCommandSender::new); } @@ -140,4 +143,15 @@ public PlayerPingProvider createPlayerPingProvider() { public PlatformInfo getPlatformInfo() { return new GeyserPlatformInfo(this.geyserApi()); } + + @Override + public Collection<SourceMetadata> getKnownSources() { + return SourceMetadata.gather( + geyserApi().extensionManager().extensions(), + Extension::name, + extension -> extension.description().version(), + extension -> String.join(", ", extension.description().authors()), + extension -> null + ); + } } diff --git a/spark-geyser/src/main/resources/extension.yml b/spark-geyser/src/main/resources/extension.yml index b3199732..ce40e759 100644 --- a/spark-geyser/src/main/resources/extension.yml +++ b/spark-geyser/src/main/resources/extension.yml @@ -1,6 +1,6 @@ id: spark name: spark main: me.lucko.spark.geyser.GeyserSparkExtension -api: 1.0.1 +api: 2.4.2 version: ${pluginVersion} authors: [Luck] diff --git a/spark-minestom/build.gradle b/spark-minestom/build.gradle index 19587094..6e0d7e62 100644 --- a/spark-minestom/build.gradle +++ b/spark-minestom/build.gradle @@ -1,5 +1,5 @@ plugins { - id 'com.github.johnrengelman.shadow' version '8.1.1' + id 'com.gradleup.shadow' version '8.3.0' } tasks.withType(JavaCompile) { @@ -41,6 +41,8 @@ shadowJar { exclude 'module-info.class' exclude 'META-INF/maven/**' exclude 'META-INF/proguard/**' + exclude '**/*.proto' + exclude '**/*.proto.bin' } artifacts { diff --git a/spark-neoforge/build.gradle b/spark-neoforge/build.gradle index 177449fc..fc50a85c 100644 --- a/spark-neoforge/build.gradle +++ b/spark-neoforge/build.gradle @@ -1,18 +1,36 @@ plugins { - id 'com.github.johnrengelman.shadow' version '8.1.1' - id 'net.neoforged.gradle.userdev' version '7.0.97' + id 'com.gradleup.shadow' version '8.3.0' + id 'net.neoforged.moddev' version '1.0.19' } -java.toolchain.languageVersion = JavaLanguageVersion.of(17) +tasks.withType(JavaCompile).configureEach { + // override, compile targeting J21 + options.release = 21 +} -tasks.withType(JavaCompile) { - // override, compile targeting J17 - options.release = 17 +java { + toolchain { + languageVersion = JavaLanguageVersion.of(21) + } } -minecraft { - accessTransformers { - file('src/main/resources/META-INF/accesstransformer.cfg') +neoForge { + version = "21.1.22" + validateAccessTransformers = true + + runs { + client { + client() + } + server { + server() + } + } + + mods { + spark { + sourceSet sourceSets.main + } } } @@ -22,13 +40,12 @@ configurations { } dependencies { - implementation "net.neoforged:neoforge:20.4.223" shade project(':spark-common') } processResources { from(sourceSets.main.resources.srcDirs) { - include 'META-INF/mods.toml' + include 'META-INF/neoforge.mods.toml' expand ( 'pluginVersion': project.pluginVersion, 'pluginDescription': 
project.pluginDescription @@ -36,7 +53,7 @@ processResources { } from(sourceSets.main.resources.srcDirs) { - exclude 'META-INF/mods.toml' + exclude 'META-INF/neoforge.mods.toml' } } @@ -46,6 +63,7 @@ shadowJar { relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' @@ -56,6 +74,8 @@ shadowJar { exclude 'module-info.class' exclude 'META-INF/maven/**' exclude 'META-INF/proguard/**' + exclude '**/*.proto' + exclude '**/*.proto.bin' } artifacts { diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeCommandSender.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeCommandSender.java index 2f6a4110..8cc97888 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeCommandSender.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeCommandSender.java @@ -25,6 +25,7 @@ import net.kyori.adventure.text.Component; import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer; import net.minecraft.commands.CommandSource; +import net.minecraft.core.RegistryAccess; import net.minecraft.network.chat.Component.Serializer; import net.minecraft.network.chat.MutableComponent; import net.minecraft.server.MinecraftServer; @@ -65,7 +66,7 @@ public UUID getUniqueId() { @Override public void sendMessage(Component message) { - MutableComponent component = Serializer.fromJson(GsonComponentSerializer.gson().serializeToTree(message)); + MutableComponent component = Serializer.fromJson(GsonComponentSerializer.gson().serializeToTree(message), RegistryAccess.EMPTY); Objects.requireNonNull(component, "component"); super.delegate.sendSystemMessage(component); } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeExtraMetadataProvider.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeExtraMetadataProvider.java deleted file mode 100644 index e18b3d31..00000000 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeExtraMetadataProvider.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
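The datapack listing from this deleted provider is not lost: it resurfaces later in this diff as pollDataPacks() on NeoForgeWorldInfoProvider, returning structured DataPackInfo values instead of ad-hoc JSON. A minimal consumer sketch; the DataPackInfo accessor names used here are assumptions, not confirmed by this diff:

    // Hypothetical consumer of the new structured datapack data
    // (accessor names name/description/source are assumed).
    for (WorldInfoProvider.DataPackInfo pack : worldInfoProvider.pollDataPacks()) {
        System.out.printf("%s (%s): %s%n", pack.name(), pack.source(), pack.description());
    }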
- */ - -package me.lucko.spark.neoforge; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import me.lucko.spark.common.platform.MetadataProvider; -import net.minecraft.server.packs.repository.Pack; -import net.minecraft.server.packs.repository.PackRepository; -import net.minecraft.server.packs.repository.PackSource; - -import java.util.LinkedHashMap; -import java.util.Map; - -public class NeoForgeExtraMetadataProvider implements MetadataProvider { - - private final PackRepository resourcePackManager; - - public NeoForgeExtraMetadataProvider(PackRepository resourcePackManager) { - this.resourcePackManager = resourcePackManager; - } - - @Override - public Map get() { - Map metadata = new LinkedHashMap<>(); - metadata.put("datapacks", datapackMetadata()); - return metadata; - } - - private JsonElement datapackMetadata() { - JsonObject datapacks = new JsonObject(); - for (Pack profile : this.resourcePackManager.getSelectedPacks()) { - JsonObject obj = new JsonObject(); - obj.addProperty("name", profile.getTitle().getString()); - obj.addProperty("description", profile.getDescription().getString()); - obj.addProperty("source", resourcePackSource(profile.getPackSource())); - datapacks.add(profile.getId(), obj); - } - return datapacks; - } - - private static String resourcePackSource(PackSource source) { - if (source == PackSource.DEFAULT) { - return "none"; - } else if (source == PackSource.BUILT_IN) { - return "builtin"; - } else if (source == PackSource.WORLD) { - return "world"; - } else if (source == PackSource.SERVER) { - return "server"; - } else { - return "unknown"; - } - } -} diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlatformInfo.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlatformInfo.java index ea0141ed..6a2338f5 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlatformInfo.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgePlatformInfo.java @@ -21,8 +21,10 @@ package me.lucko.spark.neoforge; import me.lucko.spark.common.platform.PlatformInfo; -import net.neoforged.neoforge.internal.versions.neoforge.NeoForgeVersion; -import net.neoforged.neoforge.internal.versions.neoform.NeoFormVersion; +import net.neoforged.fml.ModContainer; +import net.neoforged.fml.ModList; +import net.neoforged.fml.loading.FMLLoader; +import net.neoforged.neoforgespi.language.IModInfo; public class NeoForgePlatformInfo implements PlatformInfo { private final Type type; @@ -41,13 +43,21 @@ public String getName() { return "NeoForge"; } + @Override + public String getBrand() { + return ModList.get().getModContainerById("neoforge") + .map(ModContainer::getModInfo) + .map(IModInfo::getDisplayName) + .orElse("NeoForge"); + } + @Override public String getVersion() { - return NeoForgeVersion.getVersion(); + return FMLLoader.versionInfo().neoForgeVersion(); } @Override public String getMinecraftVersion() { - return NeoFormVersion.getMCVersion(); + return FMLLoader.versionInfo().mcVersion(); } } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeSparkMod.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeSparkMod.java index 16c19ec2..72436c9d 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeSparkMod.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeSparkMod.java @@ -24,7 +24,6 @@ import me.lucko.spark.neoforge.plugin.NeoForgeServerSparkPlugin; import net.neoforged.bus.api.IEventBus; import 
net.neoforged.bus.api.SubscribeEvent; -import net.neoforged.fml.IExtensionPoint; import net.neoforged.fml.ModContainer; import net.neoforged.fml.ModLoadingContext; import net.neoforged.fml.common.Mod; @@ -49,8 +48,6 @@ public NeoForgeSparkMod(IEventBus eventBus) { eventBus.addListener(this::setup); eventBus.addListener(this::clientInit); NeoForge.EVENT_BUS.register(this); - - ModLoadingContext.get().registerExtensionPoint(IExtensionPoint.DisplayTest.class, () -> new IExtensionPoint.DisplayTest(() -> IExtensionPoint.DisplayTest.IGNORESERVERONLY, (a, b) -> true)); } public String getVersion() { diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickHook.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickHook.java index 84e1aff2..987c42d0 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickHook.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickHook.java @@ -23,51 +23,35 @@ import me.lucko.spark.common.tick.AbstractTickHook; import me.lucko.spark.common.tick.TickHook; import net.neoforged.bus.api.SubscribeEvent; +import net.neoforged.neoforge.client.event.ClientTickEvent; import net.neoforged.neoforge.common.NeoForge; -import net.neoforged.neoforge.event.TickEvent; +import net.neoforged.neoforge.event.tick.ServerTickEvent; -public class NeoForgeTickHook extends AbstractTickHook implements TickHook { - private final TickEvent.Type type; - - public NeoForgeTickHook(TickEvent.Type type) { - this.type = type; +public abstract class NeoForgeTickHook extends AbstractTickHook implements TickHook { + @Override + public void start() { + NeoForge.EVENT_BUS.register(this); } - @SubscribeEvent - public void onTick(TickEvent.ServerTickEvent e) { - if (e.phase != TickEvent.Phase.START) { - return; - } - - if (e.type != this.type) { - return; - } - - onTick(); + @Override + public void close() { + NeoForge.EVENT_BUS.unregister(this); } - @SubscribeEvent - public void onTick(TickEvent.ClientTickEvent e) { - if (e.phase != TickEvent.Phase.START) { - return; - } + public static final class Server extends NeoForgeTickHook { - if (e.type != this.type) { - return; + @SubscribeEvent + public void onTickStart(ServerTickEvent.Pre e) { + onTick(); } - - onTick(); } + public static final class Client extends NeoForgeTickHook { - @Override - public void start() { - NeoForge.EVENT_BUS.register(this); - } - - @Override - public void close() { - NeoForge.EVENT_BUS.unregister(this); + @SubscribeEvent + public void onTickStart(ClientTickEvent.Pre e) { + onTick(); + } } } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickReporter.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickReporter.java index be61d3ef..7d289fb3 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickReporter.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeTickReporter.java @@ -23,40 +23,11 @@ import me.lucko.spark.common.tick.SimpleTickReporter; import me.lucko.spark.common.tick.TickReporter; import net.neoforged.bus.api.SubscribeEvent; +import net.neoforged.neoforge.client.event.ClientTickEvent; import net.neoforged.neoforge.common.NeoForge; -import net.neoforged.neoforge.event.TickEvent; +import net.neoforged.neoforge.event.tick.ServerTickEvent; -public class NeoForgeTickReporter extends SimpleTickReporter implements TickReporter { - private final TickEvent.Type type; - - public NeoForgeTickReporter(TickEvent.Type type) { - this.type = type; - } - - 
@SubscribeEvent - public void onTick(TickEvent.ServerTickEvent e) { - if (e.type != this.type) { - return; - } - - switch (e.phase) { - case START -> onStart(); - case END -> onEnd(); - default -> throw new AssertionError(e.phase); - } - } - @SubscribeEvent - public void onTick(TickEvent.ClientTickEvent e) { - if (e.type != this.type) { - return; - } - - switch (e.phase) { - case START -> onStart(); - case END -> onEnd(); - default -> throw new AssertionError(e.phase); - } - } +public abstract class NeoForgeTickReporter extends SimpleTickReporter implements TickReporter { @Override public void start() { @@ -69,4 +40,30 @@ public void close() { super.close(); } + public static final class Server extends NeoForgeTickReporter { + + @SubscribeEvent + public void onTickStart(ServerTickEvent.Pre e) { + onStart(); + } + + @SubscribeEvent + public void onTickEnd(ServerTickEvent.Post e) { + onEnd(); + } + } + + public static final class Client extends NeoForgeTickReporter { + + @SubscribeEvent + public void onTickStart(ClientTickEvent.Pre e) { + onStart(); + } + + @SubscribeEvent + public void onTickEnd(ClientTickEvent.Post e) { + onEnd(); + } + } + } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeWorldInfoProvider.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeWorldInfoProvider.java index ef76646c..a234bc84 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeWorldInfoProvider.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/NeoForgeWorldInfoProvider.java @@ -20,8 +20,7 @@ package me.lucko.spark.neoforge; -import it.unimi.dsi.fastutil.longs.LongIterator; -import it.unimi.dsi.fastutil.longs.LongSet; +import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; import me.lucko.spark.common.platform.world.AbstractChunkInfo; import me.lucko.spark.common.platform.world.CountMap; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -29,34 +28,51 @@ import net.minecraft.client.multiplayer.ClientLevel; import net.minecraft.server.MinecraftServer; import net.minecraft.server.level.ServerLevel; +import net.minecraft.server.packs.repository.PackRepository; +import net.minecraft.server.packs.repository.PackSource; import net.minecraft.world.entity.Entity; import net.minecraft.world.entity.EntityType; import net.minecraft.world.level.ChunkPos; +import net.minecraft.world.level.GameRules; import net.minecraft.world.level.entity.EntityLookup; -import net.minecraft.world.level.entity.EntitySection; -import net.minecraft.world.level.entity.EntitySectionStorage; +import net.minecraft.world.level.entity.LevelEntityGetter; import net.minecraft.world.level.entity.PersistentEntitySectionManager; import net.minecraft.world.level.entity.TransientEntitySectionManager; +import net.neoforged.fml.ModList; -import java.util.ArrayList; +import java.lang.reflect.Method; +import java.util.Collection; import java.util.HashMap; import java.util.List; -import java.util.stream.Stream; +import java.util.stream.Collectors; public abstract class NeoForgeWorldInfoProvider implements WorldInfoProvider { - protected List getChunksFromCache(EntitySectionStorage cache) { - LongSet loadedChunks = cache.getAllChunksWithExistingSections(); - List list = new ArrayList<>(loadedChunks.size()); - - for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) { - long chunkPos = iterator.nextLong(); - Stream> sections = cache.getExistingSectionsInChunk(chunkPos); + protected abstract PackRepository getPackRepository(); + + @Override + 
public Collection pollDataPacks() { + return getPackRepository().getSelectedPacks().stream() + .map(pack -> new DataPackInfo( + pack.getId(), + pack.getDescription().getString(), + resourcePackSource(pack.getPackSource()) + )) + .collect(Collectors.toList()); + } - list.add(new ForgeChunkInfo(chunkPos, sections)); + private static String resourcePackSource(PackSource source) { + if (source == PackSource.DEFAULT) { + return "none"; + } else if (source == PackSource.BUILT_IN) { + return "builtin"; + } else if (source == PackSource.WORLD) { + return "world"; + } else if (source == PackSource.SERVER) { + return "server"; + } else { + return "unknown"; } - - return list; } public static final class Server extends NeoForgeWorldInfoProvider { @@ -73,10 +89,15 @@ public CountsResult pollCounts() { int chunks = 0; for (ServerLevel level : this.server.getAllLevels()) { - PersistentEntitySectionManager entityManager = level.entityManager; - EntityLookup entityIndex = entityManager.visibleEntityStorage; - entities += entityIndex.count(); + if (ModList.get().isLoaded("moonrise")) { + entities += MoonriseMethods.getEntityCount(level.getEntities()); + } else { + PersistentEntitySectionManager entityManager = level.entityManager; + EntityLookup entityIndex = entityManager.visibleEntityStorage; + entities += entityIndex.count(); + } + chunks += level.getChunkSource().getLoadedChunksCount(); } @@ -88,15 +109,47 @@ public ChunksResult pollChunks() { ChunksResult data = new ChunksResult<>(); for (ServerLevel level : this.server.getAllLevels()) { - PersistentEntitySectionManager entityManager = level.entityManager; - EntitySectionStorage cache = entityManager.sectionStorage; + Long2ObjectOpenHashMap levelInfos = new Long2ObjectOpenHashMap<>(); + + for (Entity entity : level.getEntities().getAll()) { + ForgeChunkInfo info = levelInfos.computeIfAbsent( + entity.chunkPosition().toLong(), ForgeChunkInfo::new); + info.entityCounts.increment(entity.getType()); + } - List list = getChunksFromCache(cache); - data.put(level.dimension().location().getPath(), list); + data.put(level.dimension().location().getPath(), List.copyOf(levelInfos.values())); } return data; } + + @Override + public GameRulesResult pollGameRules() { + GameRulesResult data = new GameRulesResult(); + Iterable levels = this.server.getAllLevels(); + + GameRules.visitGameRuleTypes(new GameRules.GameRuleTypeVisitor() { + @Override + public > void visit(GameRules.Key key, GameRules.Type type) { + String defaultValue = type.createRule().serialize(); + data.putDefault(key.getId(), defaultValue); + + for (ServerLevel level : levels) { + String levelName = level.dimension().location().getPath(); + + String value = level.getGameRules().getRule(key).serialize(); + data.put(key.getId(), levelName, value); + } + } + }); + + return data; + } + + @Override + protected PackRepository getPackRepository() { + return this.server.getPackRepository(); + } } public static final class Client extends NeoForgeWorldInfoProvider { @@ -113,10 +166,15 @@ public CountsResult pollCounts() { return null; } - TransientEntitySectionManager entityManager = level.entityStorage; - EntityLookup entityIndex = entityManager.entityStorage; + int entities; + if (ModList.get().isLoaded("moonrise")) { + entities = MoonriseMethods.getEntityCount(level.getEntities()); + } else { + TransientEntitySectionManager entityManager = level.entityStorage; + EntityLookup entityIndex = entityManager.entityStorage; + entities = entityIndex.count(); + } - int entities = entityIndex.count(); int chunks = 
level.getChunkSource().getLoadedChunksCount(); return new CountsResult(-1, entities, -1, chunks); @@ -124,37 +182,64 @@ public CountsResult pollCounts() { @Override public ChunksResult pollChunks() { + ClientLevel level = this.client.level; + if (level == null) { + return null; + } + ChunksResult data = new ChunksResult<>(); + Long2ObjectOpenHashMap levelInfos = new Long2ObjectOpenHashMap<>(); + + for (Entity entity : level.getEntities().getAll()) { + ForgeChunkInfo info = levelInfos.computeIfAbsent(entity.chunkPosition().toLong(), ForgeChunkInfo::new); + info.entityCounts.increment(entity.getType()); + } + + data.put(level.dimension().location().getPath(), List.copyOf(levelInfos.values())); + + return data; + } + + @Override + public GameRulesResult pollGameRules() { ClientLevel level = this.client.level; if (level == null) { return null; } - TransientEntitySectionManager entityManager = level.entityStorage; - EntitySectionStorage cache = entityManager.sectionStorage; + GameRulesResult data = new GameRulesResult(); + + String levelName = level.dimension().location().getPath(); + GameRules levelRules = level.getGameRules(); + + GameRules.visitGameRuleTypes(new GameRules.GameRuleTypeVisitor() { + @Override + public > void visit(GameRules.Key key, GameRules.Type type) { + String defaultValue = type.createRule().serialize(); + data.putDefault(key.getId(), defaultValue); - List list = getChunksFromCache(cache); - data.put(level.dimension().location().getPath(), list); + String value = levelRules.getRule(key).serialize(); + data.put(key.getId(), levelName, value); + } + }); return data; } + + @Override + protected PackRepository getPackRepository() { + return this.client.getResourcePackRepository(); + } } public static final class ForgeChunkInfo extends AbstractChunkInfo> { private final CountMap> entityCounts; - ForgeChunkInfo(long chunkPos, Stream> entities) { + ForgeChunkInfo(long chunkPos) { super(ChunkPos.getX(chunkPos), ChunkPos.getZ(chunkPos)); this.entityCounts = new CountMap.Simple<>(new HashMap<>()); - entities.forEach(section -> { - if (section.getStatus().isAccessible()) { - section.getEntities().forEach(entity -> - this.entityCounts.increment(entity.getType()) - ); - } - }); } @Override @@ -168,5 +253,27 @@ public String entityTypeName(EntityType type) { } } + private static final class MoonriseMethods { + private static Method getEntityCount; + + private static Method getEntityCountMethod(LevelEntityGetter getter) { + if (getEntityCount == null) { + try { + getEntityCount = getter.getClass().getMethod("getEntityCount"); + } catch (final ReflectiveOperationException e) { + throw new RuntimeException("Cannot find Moonrise getEntityCount method", e); + } + } + return getEntityCount; + } + + private static int getEntityCount(LevelEntityGetter getter) { + try { + return (int) getEntityCountMethod(getter).invoke(getter); + } catch (final ReflectiveOperationException e) { + throw new RuntimeException("Failed to invoke Moonrise getEntityCount method", e); + } + } + } } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeClientSparkPlugin.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeClientSparkPlugin.java index 92bd6563..b779af81 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeClientSparkPlugin.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeClientSparkPlugin.java @@ -26,14 +26,12 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import 
com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.neoforge.NeoForgeCommandSender; -import me.lucko.spark.neoforge.NeoForgeExtraMetadataProvider; import me.lucko.spark.neoforge.NeoForgePlatformInfo; import me.lucko.spark.neoforge.NeoForgeSparkMod; import me.lucko.spark.neoforge.NeoForgeTickHook; @@ -46,7 +44,6 @@ import net.neoforged.fml.event.lifecycle.FMLClientSetupEvent; import net.neoforged.neoforge.client.event.RegisterClientCommandsEvent; import net.neoforged.neoforge.common.NeoForge; -import net.neoforged.neoforge.event.TickEvent; import java.util.concurrent.CompletableFuture; import java.util.stream.Stream; @@ -123,12 +120,12 @@ public ThreadDumper getDefaultThreadDumper() { @Override public TickHook createTickHook() { - return new NeoForgeTickHook(TickEvent.Type.CLIENT); + return new NeoForgeTickHook.Client(); } @Override public TickReporter createTickReporter() { - return new NeoForgeTickReporter(TickEvent.Type.CLIENT); + return new NeoForgeTickReporter.Client(); } @Override @@ -136,11 +133,6 @@ public WorldInfoProvider createWorldInfoProvider() { return new NeoForgeWorldInfoProvider.Client(this.minecraft); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new NeoForgeExtraMetadataProvider(this.minecraft.getResourcePackRepository()); - } - @Override public PlatformInfo getPlatformInfo() { return new NeoForgePlatformInfo(PlatformInfo.Type.CLIENT); diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeServerSparkPlugin.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeServerSparkPlugin.java index 94522c12..5a573a03 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeServerSparkPlugin.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeServerSparkPlugin.java @@ -29,7 +29,6 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; -import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -37,7 +36,6 @@ import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.neoforge.NeoForgeCommandSender; -import me.lucko.spark.neoforge.NeoForgeExtraMetadataProvider; import me.lucko.spark.neoforge.NeoForgePlatformInfo; import me.lucko.spark.neoforge.NeoForgePlayerPingProvider; import me.lucko.spark.neoforge.NeoForgeServerConfigProvider; @@ -52,7 +50,6 @@ import net.neoforged.bus.api.SubscribeEvent; import net.neoforged.neoforge.common.NeoForge; import net.neoforged.neoforge.event.RegisterCommandsEvent; -import net.neoforged.neoforge.event.TickEvent; import net.neoforged.neoforge.event.server.ServerAboutToStartEvent; import net.neoforged.neoforge.event.server.ServerStoppingEvent; import net.neoforged.neoforge.server.permission.PermissionAPI; @@ -226,12 +223,12 @@ public ThreadDumper getDefaultThreadDumper() { @Override 
public TickHook createTickHook() { - return new NeoForgeTickHook(TickEvent.Type.SERVER); + return new NeoForgeTickHook.Server(); } @Override public TickReporter createTickReporter() { - return new NeoForgeTickReporter(TickEvent.Type.SERVER); + return new NeoForgeTickReporter.Server(); } @Override @@ -244,11 +241,6 @@ public ServerConfigProvider createServerConfigProvider() { return new NeoForgeServerConfigProvider(); } - @Override - public MetadataProvider createExtraMetadataProvider() { - return new NeoForgeExtraMetadataProvider(this.server.getPackRepository()); - } - @Override public WorldInfoProvider createWorldInfoProvider() { return new NeoForgeWorldInfoProvider.Server(this.server); diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java index 43d77a16..256f90a4 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java @@ -117,7 +117,8 @@ public Collection getKnownSources() { ModList.get().getMods(), IModInfo::getModId, mod -> mod.getVersion().toString(), - mod -> null // ? + mod -> null, // ? + IModInfo::getDescription ); } diff --git a/spark-neoforge/src/main/resources/META-INF/accesstransformer.cfg b/spark-neoforge/src/main/resources/META-INF/accesstransformer.cfg index 43d14fc6..80f3f6e2 100644 --- a/spark-neoforge/src/main/resources/META-INF/accesstransformer.cfg +++ b/spark-neoforge/src/main/resources/META-INF/accesstransformer.cfg @@ -2,6 +2,7 @@ public net.minecraft.server.level.ServerLevel entityManager # entityManager public net.minecraft.world.level.entity.PersistentEntitySectionManager sectionStorage # sectionStorage public net.minecraft.world.level.entity.PersistentEntitySectionManager visibleEntityStorage # visibleEntityStorage public net.minecraft.client.multiplayer.ClientLevel entityStorage # entityStorage +public net.minecraft.client.multiplayer.ClientLevel getEntities()Lnet/minecraft/world/level/entity/LevelEntityGetter; # getEntities public net.minecraft.world.level.entity.TransientEntitySectionManager sectionStorage # sectionStorage public net.minecraft.world.level.entity.TransientEntitySectionManager entityStorage # entityStorage public net.minecraft.client.Minecraft gameThread # gameThread diff --git a/spark-neoforge/src/main/resources/META-INF/mods.toml b/spark-neoforge/src/main/resources/META-INF/neoforge.mods.toml similarity index 100% rename from spark-neoforge/src/main/resources/META-INF/mods.toml rename to spark-neoforge/src/main/resources/META-INF/neoforge.mods.toml diff --git a/spark-nukkit/build.gradle b/spark-nukkit/build.gradle index 2dd71698..c11e02e8 100644 --- a/spark-nukkit/build.gradle +++ b/spark-nukkit/build.gradle @@ -1,10 +1,9 @@ plugins { - id 'com.github.johnrengelman.shadow' version '8.1.1' + id 'com.gradleup.shadow' version '8.3.0' } dependencies { implementation project(':spark-common') - implementation 'net.kyori:adventure-text-serializer-legacy:4.13.1' compileOnly 'cn.nukkit:nukkit:1.0-SNAPSHOT' } @@ -27,6 +26,7 @@ shadowJar { relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' + relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 
'me.lucko.spark.lib.asm' @@ -37,6 +37,8 @@ shadowJar { exclude 'module-info.class' exclude 'META-INF/maven/**' exclude 'META-INF/proguard/**' + exclude '**/*.proto' + exclude '**/*.proto.bin' } artifacts { diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java index 180e0af8..e815b6cb 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java +++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java @@ -20,9 +20,8 @@ package me.lucko.spark.nukkit; -import me.lucko.spark.common.sampler.source.ClassSourceLookup; - import cn.nukkit.plugin.PluginClassLoader; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import java.io.IOException; import java.net.URISyntaxException; diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitCommandSender.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitCommandSender.java index 56d143fe..3d6de227 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitCommandSender.java +++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitCommandSender.java @@ -20,14 +20,12 @@ package me.lucko.spark.nukkit; +import cn.nukkit.Player; +import cn.nukkit.command.CommandSender; import me.lucko.spark.common.command.sender.AbstractCommandSender; - import net.kyori.adventure.text.Component; import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; -import cn.nukkit.Player; -import cn.nukkit.command.CommandSender; - import java.util.UUID; public class NukkitCommandSender extends AbstractCommandSender { diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java index ab7a40bd..a1c9c5b9 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java +++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java @@ -20,9 +20,8 @@ package me.lucko.spark.nukkit; -import me.lucko.spark.common.platform.PlatformInfo; - import cn.nukkit.Server; +import me.lucko.spark.common.platform.PlatformInfo; public class NukkitPlatformInfo implements PlatformInfo { private final Server server; @@ -41,6 +40,11 @@ public String getName() { return "Nukkit"; } + @Override + public String getBrand() { + return this.server.getName(); + } + @Override public String getVersion() { return this.server.getNukkitVersion(); diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java index fc25d7c0..b2e5100f 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java +++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java @@ -20,12 +20,10 @@ package me.lucko.spark.nukkit; -import com.google.common.collect.ImmutableMap; - -import me.lucko.spark.common.monitor.ping.PlayerPingProvider; - import cn.nukkit.Player; import cn.nukkit.Server; +import com.google.common.collect.ImmutableMap; +import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import java.util.Map; diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java index ae212413..39be28d3 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java +++ 
b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java @@ -20,6 +20,10 @@ package me.lucko.spark.nukkit; +import cn.nukkit.command.Command; +import cn.nukkit.command.CommandSender; +import cn.nukkit.plugin.PluginBase; +import cn.nukkit.plugin.service.ServicePriority; import me.lucko.spark.api.Spark; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; @@ -27,11 +31,6 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import cn.nukkit.command.Command; -import cn.nukkit.command.CommandSender; -import cn.nukkit.plugin.PluginBase; -import cn.nukkit.plugin.service.ServicePriority; - import java.nio.file.Path; import java.util.logging.Level; import java.util.stream.Stream; diff --git a/spark-paper/build.gradle b/spark-paper/build.gradle new file mode 100644 index 00000000..4ca3dd62 --- /dev/null +++ b/spark-paper/build.gradle @@ -0,0 +1,79 @@ +plugins { + id 'net.kyori.blossom' version '1.3.0' + id 'com.gradleup.shadow' version '8.3.0' + id 'maven-publish' +} + +tasks.withType(JavaCompile) { + // override, compile targeting J21 + options.release = 21 +} + +tasks.jar { + archiveClassifier = 'original' +} + +dependencies { + implementation project(':spark-common') + compileOnly 'io.papermc.paper:paper-api:1.21-R0.1-SNAPSHOT' +} + +repositories { + maven { url "https://repo.papermc.io/repository/maven-public/" } +} + +blossom { + replaceTokenIn('src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java') + replaceToken '@version@', project.pluginVersion +} + +shadowJar { + archiveFileName = "spark-${project.pluginVersion}-paper.jar" + archiveClassifier = '' + + dependencies { + exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) + exclude(dependency('net.bytebuddy:byte-buddy-agent')) + } + + relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.paper.lib.adventure.pagination' + relocate 'com.google.protobuf', 'me.lucko.spark.paper.lib.protobuf' + relocate 'org.objectweb.asm', 'me.lucko.spark.paper.lib.asm' + relocate 'one.profiler', 'me.lucko.spark.paper.lib.asyncprofiler' + relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.paper.lib.bytesocks' + relocate 'org.java_websocket', 'me.lucko.spark.paper.lib.bytesocks.ws' + + // nest common classes beneath the paper package to avoid conflicts with spark-bukkit + relocate 'me.lucko.spark.common', 'me.lucko.spark.paper.common' + relocate 'me.lucko.spark.proto', 'me.lucko.spark.paper.proto' + relocate 'spark-native', 'spark-paper-native' + + exclude 'module-info.class' + exclude 'META-INF/maven/**' + exclude 'META-INF/proguard/**' + exclude '**/*.proto' + exclude '**/*.proto.bin' +} + +artifacts { + archives shadowJar + shadow shadowJar +} + +publishing { + //repositories { + // maven { + // url = 'https://oss.sonatype.org/content/repositories/snapshots' + // credentials { + // username = sonatypeUsername + // password = sonatypePassword + // } + // } + //} + publications { + shadow(MavenPublication) { publication -> + project.shadow.component(publication) + version = "${project.pluginVersion}-SNAPSHOT" + } + } +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java new file mode 100644 index 00000000..2c5f7c0e --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java @@ -0,0 +1,61 @@ +/* + * This 
file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.paper; + +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import org.bukkit.plugin.java.JavaPlugin; + +import java.lang.reflect.Field; + +public class PaperClassSourceLookup extends ClassSourceLookup.ByClassLoader { + private static final Class<?> PLUGIN_CLASS_LOADER; + private static final Field PLUGIN_FIELD; + + private static final Class<?> PAPER_PLUGIN_CLASS_LOADER; + private static final Field PAPER_PLUGIN_FIELD; + + static { + try { + PLUGIN_CLASS_LOADER = Class.forName("org.bukkit.plugin.java.PluginClassLoader"); + PLUGIN_FIELD = PLUGIN_CLASS_LOADER.getDeclaredField("plugin"); + PLUGIN_FIELD.setAccessible(true); + + PAPER_PLUGIN_CLASS_LOADER = Class.forName("io.papermc.paper.plugin.entrypoint.classloader.PaperPluginClassLoader"); + PAPER_PLUGIN_FIELD = PAPER_PLUGIN_CLASS_LOADER.getDeclaredField("loadedJavaPlugin"); + PAPER_PLUGIN_FIELD.setAccessible(true); + } catch (ReflectiveOperationException e) { + throw new ExceptionInInitializerError(e); + } + } + + @Override + public String identify(ClassLoader loader) throws ReflectiveOperationException { + if (PLUGIN_CLASS_LOADER.isInstance(loader)) { + JavaPlugin plugin = (JavaPlugin) PLUGIN_FIELD.get(loader); + return plugin.getName(); + } else if (PAPER_PLUGIN_CLASS_LOADER.isInstance(loader)) { + JavaPlugin plugin = (JavaPlugin) PAPER_PLUGIN_FIELD.get(loader); + return plugin.getName(); + } + return null; + } +} + diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java new file mode 100644 index 00000000..c3b569d1 --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java @@ -0,0 +1,58 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
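A short usage sketch for PaperClassSourceLookup above; the sampled class is a placeholder, and identify() returns null for classes not owned by a plugin:

    // Illustrative only: resolve which plugin owns a sampled class.
    ClassSourceLookup lookup = new PaperClassSourceLookup();
    try {
        String owner = lookup.identify(sampledClass.getClassLoader()); // e.g. "WorldEdit", or null
    } catch (ReflectiveOperationException e) {
        // reflective field access failed; treat the source as unknown
    }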
+ */ + +package me.lucko.spark.paper; + +import me.lucko.spark.common.command.sender.AbstractCommandSender; +import net.kyori.adventure.text.Component; +import org.bukkit.command.CommandSender; +import org.bukkit.entity.Player; + +import java.util.UUID; + +public class PaperCommandSender extends AbstractCommandSender<CommandSender> { + + public PaperCommandSender(CommandSender sender) { + super(sender); + } + + @Override + public String getName() { + return this.delegate.getName(); + } + + @Override + public UUID getUniqueId() { + if (super.delegate instanceof Player player) { + return player.getUniqueId(); + } + return null; + } + + @Override + public void sendMessage(Component message) { + super.delegate.sendMessage(message); + } + + @Override + public boolean hasPermission(String permission) { + return super.delegate.hasPermission(permission); + } +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java new file mode 100644 index 00000000..114175e9 --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java @@ -0,0 +1,53 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.paper; + +import io.papermc.paper.ServerBuildInfo; +import me.lucko.spark.common.platform.PlatformInfo; + +public enum PaperPlatformInfo implements PlatformInfo { + INSTANCE; + + @Override + public Type getType() { + return Type.SERVER; + } + + @Override + public String getName() { + return "Paper"; + } + + @Override + public String getBrand() { + return ServerBuildInfo.buildInfo().brandName(); + } + + @Override + public String getVersion() { + return ServerBuildInfo.buildInfo().asString(ServerBuildInfo.StringRepresentation.VERSION_SIMPLE); + } + + @Override + public String getMinecraftVersion() { + return ServerBuildInfo.buildInfo().minecraftVersionId(); + } +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java new file mode 100644 index 00000000..e896b214 --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java @@ -0,0 +1,45 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
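PaperPlatformInfo above is built entirely on Paper's ServerBuildInfo API rather than on parsing Bukkit version strings. The example return values in the comments below are assumptions for a typical Paper build, not output captured from this code:

    ServerBuildInfo info = ServerBuildInfo.buildInfo();
    String brand = info.brandName();              // e.g. "Paper" (assumed example)
    String version = info.asString(ServerBuildInfo.StringRepresentation.VERSION_SIMPLE); // e.g. "1.21-40-abc1234"
    String minecraft = info.minecraftVersionId(); // e.g. "1.21"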
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.paper; + +import com.google.common.collect.ImmutableMap; +import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +import org.bukkit.Server; +import org.bukkit.entity.Player; + +import java.util.Map; + +public class PaperPlayerPingProvider implements PlayerPingProvider { + private final Server server; + + public PaperPlayerPingProvider(Server server) { + this.server = server; + } + + @Override + public Map<String, Integer> poll() { + ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder(); + for (Player player : this.server.getOnlinePlayers()) { + builder.put(player.getName(), player.getPing()); + } + return builder.build(); + } +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java new file mode 100644 index 00000000..d1301f8f --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java @@ -0,0 +1,159 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
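The ping provider above leans on Player#getPing, which Paper exposes directly, so no protocol-level access is needed. A hedged usage sketch; obtaining the server via Bukkit.getServer() is illustrative wiring, not part of this diff:

    PlayerPingProvider pings = new PaperPlayerPingProvider(Bukkit.getServer());
    for (Map.Entry<String, Integer> entry : pings.poll().entrySet()) {
        // name -> round-trip ping in milliseconds
        System.out.println(entry.getKey() + ": " + entry.getValue() + "ms");
    }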
+ */ + +package me.lucko.spark.paper; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonSerializer; +import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter; +import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import org.bukkit.Bukkit; +import org.bukkit.World; +import org.bukkit.configuration.MemorySection; +import org.bukkit.configuration.file.YamlConfiguration; + +import java.io.BufferedReader; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; + +public class PaperServerConfigProvider extends ServerConfigProvider { + + /** A map of provided files and their type */ + private static final Map<String, ConfigParser> FILES; + /** A collection of paths to be excluded from the files */ + private static final Collection<String> HIDDEN_PATHS; + + public PaperServerConfigProvider() { + super(FILES, HIDDEN_PATHS); + } + + private static class YamlConfigParser implements ConfigParser { + public static final YamlConfigParser INSTANCE = new YamlConfigParser(); + protected static final Gson GSON = new GsonBuilder() + .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))) + .create(); + + @Override + public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException { + Map<String, Object> values = this.parse(Paths.get(file)); + if (values == null) { + return null; + } + + return filter.apply(GSON.toJsonTree(values)); + } + + @Override + public Map<String, Object> parse(BufferedReader reader) throws IOException { + YamlConfiguration config = YamlConfiguration.loadConfiguration(reader); + return config.getValues(false); + } + } + + // Paper 1.19+ split config layout + private static class SplitYamlConfigParser extends YamlConfigParser { + public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser(); + + @Override + public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException { + String prefix = group.replace("/", ""); + + Path configDir = Paths.get("config"); + if (!Files.exists(configDir)) { + return null; + } + + JsonObject root = new JsonObject(); + + for (Map.Entry<String, Path> entry : getNestedFiles(configDir, prefix).entrySet()) { + String fileName = entry.getKey(); + Path path = entry.getValue(); + + Map<String, Object> values = this.parse(path); + if (values == null) { + continue; + } + + // apply the filter individually to each nested file + root.add(fileName, filter.apply(GSON.toJsonTree(values))); + } + + return root; + } + + private static Map<String, Path> getNestedFiles(Path configDir, String prefix) { + Map<String, Path> files = new LinkedHashMap<>(); + files.put("global.yml", configDir.resolve(prefix + "-global.yml")); + files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml")); + for (World world : Bukkit.getWorlds()) { + files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml")); + } + return files; + } + } + + static { + ImmutableMap.Builder<String, ConfigParser> files = ImmutableMap.<String, ConfigParser>builder() + .put("server.properties", PropertiesConfigParser.INSTANCE) + .put("bukkit.yml", 
YamlConfigParser.INSTANCE) + .put("spigot.yml", YamlConfigParser.INSTANCE) + .put("paper.yml", YamlConfigParser.INSTANCE) + .put("paper/", SplitYamlConfigParser.INSTANCE) + .put("purpur.yml", YamlConfigParser.INSTANCE) + .put("pufferfish.yml", YamlConfigParser.INSTANCE); + + for (String config : getSystemPropertyList("spark.serverconfigs.extra")) { + files.put(config, YamlConfigParser.INSTANCE); + } + + ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder() + .add("database") + .add("settings.bungeecord-addresses") + .add("settings.velocity-support.secret") + .add("proxies.velocity.secret") + .add("server-ip") + .add("motd") + .add("resource-pack") + .add("rcon.password") + .add("rcon.ip") + .add("level-seed") + .add("world-settings.*.feature-seeds") + .add("world-settings.*.seed-*") + .add("feature-seeds") + .add("seed-*") + .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths")); + + FILES = files.build(); + HIDDEN_PATHS = hiddenPaths.build(); + } + +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java new file mode 100644 index 00000000..73694d27 --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java @@ -0,0 +1,217 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
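SplitYamlConfigParser above maps a config "group" onto Paper's 1.19+ split-file layout. A sketch of the resolution it performs for the "paper/" group, following getNestedFiles above (world names are examples):

    String prefix = "paper/".replace("/", "");                         // -> "paper"
    Path configDir = Paths.get("config");
    Path global   = configDir.resolve(prefix + "-global.yml");         // config/paper-global.yml
    Path defaults = configDir.resolve(prefix + "-world-defaults.yml"); // config/paper-world-defaults.yml
    // ...plus <world folder>/paper-world.yml for each loaded world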
+ */ + +package me.lucko.spark.paper; + +import me.lucko.spark.api.Spark; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; +import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.tick.TickReporter; +import me.lucko.spark.common.util.classfinder.ClassFinder; +import me.lucko.spark.paper.api.PaperClassLookup; +import me.lucko.spark.paper.api.PaperScheduler; +import me.lucko.spark.paper.api.PaperSparkModule; +import org.bukkit.Server; +import org.bukkit.command.CommandSender; +import org.bukkit.plugin.Plugin; + +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Stream; + +public class PaperSparkPlugin implements PaperSparkModule, SparkPlugin { + private final Server server; + private final Logger logger; + private final PaperScheduler scheduler; + private final PaperClassLookup classLookup; + + private final PaperTickHook tickHook; + private final PaperTickReporter tickReporter; + private final ThreadDumper gameThreadDumper; + private final SparkPlatform platform; + + public PaperSparkPlugin(Server server, Logger logger, PaperScheduler scheduler, PaperClassLookup classLookup) { + this.server = server; + this.logger = logger; + this.scheduler = scheduler; + this.classLookup = classLookup; + this.tickHook = new PaperTickHook(); + this.tickReporter = new PaperTickReporter(); + this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread()); + this.platform = new SparkPlatform(this); + } + + @Override + public void enable() { + this.platform.enable(); + } + + @Override + public void disable() { + this.platform.disable(); + } + + @Override + public void executeCommand(CommandSender sender, String[] args) { + this.platform.executeCommand(new PaperCommandSender(sender), args); + } + + @Override + public List<String> tabComplete(CommandSender sender, String[] args) { + return this.platform.tabCompleteCommand(new PaperCommandSender(sender), args); + } + + @Override + public boolean hasPermission(CommandSender sender) { + return this.platform.hasPermissionForAnyCommand(new PaperCommandSender(sender)); + } + + @Override + public Collection<String> getPermissions() { + return this.platform.getAllSparkPermissions(); + } + + @Override + public void onServerTickStart() { + this.tickHook.onTick(); + } + + @Override + public void onServerTickEnd(double duration) { + this.tickReporter.onTick(duration); + } + + @Override + public String getVersion() { + return "@version@"; + } + + @Override + public Path getPluginDirectory() { + return this.server.getPluginsFolder().toPath().resolve("spark"); + } + + @Override + public String getCommandName() { + return "spark"; + } + + @Override + public Stream<PaperCommandSender> getCommandSenders() { + return Stream.concat( + this.server.getOnlinePlayers().stream(), + Stream.of(this.server.getConsoleSender()) + ).map(PaperCommandSender::new); + } + + @Override + public void executeAsync(Runnable task) { + this.scheduler.executeAsync(task); + } + + @Override + public void 
executeSync(Runnable task) { + this.scheduler.executeSync(task); + } + + @Override + public void log(Level level, String msg) { + this.logger.log(level, msg); + } + + @Override + public ThreadDumper getDefaultThreadDumper() { + return this.gameThreadDumper; + } + + @Override + public TickHook createTickHook() { + return this.tickHook; + } + + @Override + public TickReporter createTickReporter() { + return this.tickReporter; + } + + @Override + public ClassSourceLookup createClassSourceLookup() { + return new PaperClassSourceLookup(); + } + + @Override + public ClassFinder createClassFinder() { + return className -> { + try { + return this.classLookup.lookup(className); + } catch (Exception e) { + return null; + } + }; + } + + @Override + public Collection<SourceMetadata> getKnownSources() { + return SourceMetadata.gather( + Arrays.asList(this.server.getPluginManager().getPlugins()), + Plugin::getName, + plugin -> plugin.getPluginMeta().getVersion(), + plugin -> String.join(", ", plugin.getPluginMeta().getAuthors()), + plugin -> plugin.getPluginMeta().getDescription() + ); + } + + @Override + public PlayerPingProvider createPlayerPingProvider() { + return new PaperPlayerPingProvider(this.server); + } + + @Override + public ServerConfigProvider createServerConfigProvider() { + return new PaperServerConfigProvider(); + } + + @Override + public WorldInfoProvider createWorldInfoProvider() { + return new PaperWorldInfoProvider(this.server); + } + + @Override + public PlatformInfo getPlatformInfo() { + return PaperPlatformInfo.INSTANCE; + } + + @Override + public void registerApi(Spark api) { + // this.server.getServicesManager().register(Spark.class, api, null, ServicePriority.Normal); + } +} diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java new file mode 100644 index 00000000..06126e16 --- /dev/null +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java @@ -0,0 +1,46 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
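Note the inversion in PaperSparkPlugin above relative to the other platforms: ticks are pushed in by the host through PaperSparkModule.onServerTickStart/onServerTickEnd rather than pulled from an event bus. A hedged sketch of the host-side driving loop; the variable names are assumptions, and the duration unit is assumed to follow the double-milliseconds convention the reporter uses elsewhere in spark:

    long start = System.nanoTime();
    module.onServerTickStart();                                   // forwards to PaperTickHook.onTick()
    // ... the server runs one tick ...
    module.onServerTickEnd((System.nanoTime() - start) / 1.0e6);  // forwards to PaperTickReporter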
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java
new file mode 100644
index 00000000..06126e16
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java
@@ -0,0 +1,46 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.tick.AbstractTickHook;
+import me.lucko.spark.common.tick.TickHook;
+import org.bukkit.event.Listener;
+
+public class PaperTickHook extends AbstractTickHook implements TickHook, Listener {
+    private boolean open = false;
+
+    @Override
+    public void start() {
+        this.open = true;
+    }
+
+    @Override
+    public void close() {
+        this.open = false;
+    }
+
+    @Override
+    public void onTick() {
+        if (this.open) {
+            super.onTick();
+        }
+    }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java
new file mode 100644
index 00000000..4db1f16e
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java
@@ -0,0 +1,46 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.tick.AbstractTickReporter;
+import me.lucko.spark.common.tick.TickReporter;
+import org.bukkit.event.Listener;
+
+public class PaperTickReporter extends AbstractTickReporter implements TickReporter, Listener {
+    private boolean open = false;
+
+    @Override
+    public void start() {
+        this.open = true;
+    }
+
+    @Override
+    public void close() {
+        this.open = false;
+    }
+
+    @Override
+    public void onTick(double duration) {
+        if (this.open) {
+            super.onTick(duration);
+        }
+    }
+}
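The `open` flag gates both classes: Paper invokes the callbacks every tick, but nothing is forwarded to spark unless a sampler has called `start()`. A minimal sketch of how a host server might drive these hooks; the loop shape, the stub method names, and milliseconds as the duration unit are illustrative assumptions, not part of this patch:

    import me.lucko.spark.paper.api.PaperSparkModule;

    // Hypothetical server tick loop feeding the spark module's tick callbacks
    // (PaperSparkModule is defined later in this patch).
    final class TickLoopSketch {
        private final PaperSparkModule spark;

        TickLoopSketch(PaperSparkModule spark) {
            this.spark = spark;
        }

        void runOneTick() {
            this.spark.onServerTickStart();         // -> PaperTickHook.onTick(); no-op while closed
            long start = System.nanoTime();
            tickGame();                             // hypothetical: one iteration of game logic
            double durationMs = (System.nanoTime() - start) / 1_000_000.0;
            this.spark.onServerTickEnd(durationMs); // -> PaperTickReporter.onTick(duration)
        }

        private void tickGame() {
            // hypothetical game logic
        }
    }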
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java
new file mode 100644
index 00000000..1729a95f
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java
@@ -0,0 +1,149 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import org.bukkit.Chunk;
+import org.bukkit.GameRule;
+import org.bukkit.Server;
+import org.bukkit.World;
+import org.bukkit.entity.Entity;
+import org.bukkit.entity.EntityType;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Locale;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+public class PaperWorldInfoProvider implements WorldInfoProvider {
+    private final Server server;
+
+    public PaperWorldInfoProvider(Server server) {
+        this.server = server;
+    }
+
+    @Override
+    public CountsResult pollCounts() {
+        int players = this.server.getOnlinePlayers().size();
+        int entities = 0;
+        int tileEntities = 0;
+        int chunks = 0;
+
+        for (World world : this.server.getWorlds()) {
+            entities += world.getEntityCount();
+            tileEntities += world.getTileEntityCount();
+            chunks += world.getChunkCount();
+        }
+
+        return new CountsResult(players, entities, tileEntities, chunks);
+    }
+
+    @Override
+    public ChunksResult<PaperChunkInfo> pollChunks() {
+        ChunksResult<PaperChunkInfo> data = new ChunksResult<>();
+
+        for (World world : this.server.getWorlds()) {
+            Chunk[] chunks = world.getLoadedChunks();
+
+            List<PaperChunkInfo> list = new ArrayList<>(chunks.length);
+            for (Chunk chunk : chunks) {
+                if (chunk != null) {
+                    list.add(new PaperChunkInfo(chunk));
+                }
+            }
+
+            data.put(world.getName(), list);
+        }
+
+        return data;
+    }
+
+    @Override
+    public GameRulesResult pollGameRules() {
+        GameRulesResult data = new GameRulesResult();
+
+        boolean addDefaults = true; // add defaults in the first iteration
+        for (World world : this.server.getWorlds()) {
+            for (String gameRule : world.getGameRules()) {
+                GameRule<?> ruleObj = GameRule.getByName(gameRule);
+                if (ruleObj == null) {
+                    continue;
+                }
+
+                if (addDefaults) {
+                    Object defaultValue = world.getGameRuleDefault(ruleObj);
+                    data.putDefault(gameRule, Objects.toString(defaultValue));
+                }
+
+                Object value = world.getGameRuleValue(ruleObj);
+                data.put(gameRule, world.getName(), Objects.toString(value));
+            }
+
+            addDefaults = false;
+        }
+
+        return data;
+    }
+
+    @SuppressWarnings("removal")
+    @Override
+    public Collection<DataPackInfo> pollDataPacks() {
+        return this.server.getDataPackManager().getDataPacks().stream()
+                .map(pack -> new DataPackInfo(
+                        pack.getTitle(),
+                        pack.getDescription(),
+                        pack.getSource().name().toLowerCase(Locale.ROOT).replace("_", "")
+                ))
+                .collect(Collectors.toList());
+    }
+
+    static final class PaperChunkInfo extends AbstractChunkInfo<EntityType> {
+        private final CountMap<EntityType> entityCounts;
+
+        PaperChunkInfo(Chunk chunk) {
+            super(chunk.getX(), chunk.getZ());
+
+            this.entityCounts = new CountMap.EnumKeyed<>(EntityType.class);
+            for (Entity entity : chunk.getEntities()) {
+                if (entity != null) {
+                    this.entityCounts.increment(entity.getType());
+                }
+            }
+        }
+
+        @Override
+        public CountMap<EntityType> getEntityCounts() {
+            return this.entityCounts;
+        }
+
+        @SuppressWarnings("deprecation")
+        @Override
+        public String entityTypeName(EntityType type) {
+            return type.getName();
+        }
+
+    }
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java
new file mode 100644
index 00000000..deca3376
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper.api;
+
+/**
+ * Used to indicate the version of the spark module API supported by the Paper server.
+ *
+ * <p>This allows spark to add/deprecate callback methods in the {@link PaperSparkModule} interface,
+ * but know at runtime whether they will actually be called by Paper.</p>
+ */
+public enum Compatibility {
+
+    /**
+     * Indicates that Paper supports version 1.0 of the spark module API.
+     */
+    VERSION_1_0
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java
new file mode 100644
index 00000000..280e4d93
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java
@@ -0,0 +1,27 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper.api;
+
+public interface PaperClassLookup {
+
+    Class<?> lookup(String className) throws Exception;
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java
new file mode 100644
index 00000000..7f4fdd31
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java
@@ -0,0 +1,29 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper.api;
+
+public interface PaperScheduler {
+
+    void executeAsync(Runnable task);
+
+    void executeSync(Runnable task);
+
+}
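`PaperClassLookup` and `PaperScheduler` are the only services the host must supply to the module. A minimal sketch of both in one class, assuming a plain executor for async work; a real server would hand sync tasks to its main thread and might search plugin class loaders in `lookup` (the class name here is hypothetical):

    import me.lucko.spark.paper.api.PaperClassLookup;
    import me.lucko.spark.paper.api.PaperScheduler;

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    // Illustrative host-side implementation, not part of this patch.
    final class SketchServices implements PaperScheduler, PaperClassLookup {
        private final ExecutorService asyncPool = Executors.newSingleThreadExecutor();

        @Override
        public void executeAsync(Runnable task) {
            this.asyncPool.execute(task);
        }

        @Override
        public void executeSync(Runnable task) {
            task.run(); // placeholder: a real server queues this onto the main thread
        }

        @Override
        public Class<?> lookup(String className) throws Exception {
            return Class.forName(className); // a real server might consult plugin class loaders too
        }
    }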
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java
new file mode 100644
index 00000000..ce8b74a3
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java
@@ -0,0 +1,125 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper.api;
+
+import me.lucko.spark.paper.PaperSparkPlugin;
+import org.bukkit.Server;
+import org.bukkit.command.CommandSender;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.logging.Logger;
+
+/**
+ * Spark module for use as a library within the Paper server implementation.
+ *
+ * <p>Paper provides:</p>
+ * <ul>
+ *     <li>a {@link Server} instance</li>
+ *     <li>a {@link Logger} instance</li>
+ *     <li>a {@link PaperScheduler} instance</li>
+ *     <li>a {@link PaperClassLookup} instance</li>
+ * </ul>
+ *
+ * <p>Paper is expected to:</p>
+ * <ul>
+ *     <li>call {@link #enable()} to enable spark, either immediately or when the server has finished starting</li>
+ *     <li>call {@link #disable()} to disable spark when the server is stopping</li>
+ *     <li>call {@link #executeCommand(CommandSender, String[])} when the spark command is executed</li>
+ *     <li>call {@link #tabComplete(CommandSender, String[])} when the spark command is tab completed</li>
+ *     <li>call {@link #onServerTickStart()} at the start of each server tick</li>
+ *     <li>call {@link #onServerTickEnd(double)} at the end of each server tick</li>
+ * </ul>
+ *
+ * <p>This interface and the other interfaces in this package define the API between Paper and spark. All other classes
+ * are subject to change, even between minor versions.</p>
+ */
+public interface PaperSparkModule {
+
+    /**
+     * Creates a new PaperSparkModule.
+     *
+     * @param compatibility the Paper/spark compatibility version
+     * @param server the server
+     * @param logger a logger that can be used by spark
+     * @param scheduler the scheduler
+     * @param classLookup a class lookup utility
+     * @return a new PaperSparkModule
+     */
+    static PaperSparkModule create(Compatibility compatibility, Server server, Logger logger, PaperScheduler scheduler, PaperClassLookup classLookup) {
+        return new PaperSparkPlugin(server, logger, scheduler, classLookup);
+    }
+
+    /**
+     * Enables the spark module.
+     */
+    void enable();
+
+    /**
+     * Disables the spark module.
+     */
+    void disable();
+
+    /**
+     * Handles a command execution.
+     *
+     * @param sender the sender
+     * @param args the command arguments
+     */
+    void executeCommand(CommandSender sender, String[] args);
+
+    /**
+     * Handles a tab completion request.
+     *
+     * @param sender the sender
+     * @param args the command arguments
+     * @return a list of completions
+     */
+    List<String> tabComplete(CommandSender sender, String[] args);
+
+    /**
+     * Gets if the sender has permission to execute any spark commands.
+     *
+     * @param sender the sender
+     * @return if the sender has permission
+     */
+    boolean hasPermission(CommandSender sender);
+
+    /**
+     * Gets a collection of all known spark permissions.
+     *
+     * @return a set of permissions
+     */
+    Collection<String> getPermissions();
+
+    /**
+     * Called by Paper at the start of each server tick.
+     */
+    void onServerTickStart();
+
+    /**
+     * Called by Paper at the end of each server tick.
+     *
+     * @param duration the duration of the tick
+     */
+    void onServerTickEnd(double duration);
+
+}
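The contract above implies only a small bootstrap on the Paper side. A hypothetical wiring sketch, reusing the illustrative `SketchServices` from earlier; none of these class or method names are part of the patch:

    import me.lucko.spark.paper.api.Compatibility;
    import me.lucko.spark.paper.api.PaperSparkModule;

    import java.util.logging.Logger;

    final class SparkBootstrapSketch {
        static PaperSparkModule start(org.bukkit.Server server, Logger logger) {
            SketchServices services = new SketchServices();
            PaperSparkModule spark = PaperSparkModule.create(
                    Compatibility.VERSION_1_0, server, logger, services, services);
            spark.enable(); // immediately, or once the server has finished starting
            return spark;
        }

        // Inside the server's /spark command handler:
        static void onSparkCommand(PaperSparkModule spark, org.bukkit.command.CommandSender sender, String[] args) {
            if (spark.hasPermission(sender)) {
                spark.executeCommand(sender, args);
            }
        }
    }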
diff --git a/spark-sponge7/build.gradle b/spark-sponge7/build.gradle
index aae4ef4a..da2f3d5b 100644
--- a/spark-sponge7/build.gradle
+++ b/spark-sponge7/build.gradle
@@ -1,6 +1,6 @@
 plugins {
     id 'net.kyori.blossom' version '1.3.0'
-    id 'com.github.johnrengelman.shadow' version '8.1.1'
+    id 'com.gradleup.shadow' version '8.3.0'
 }
 
 dependencies {
@@ -24,6 +24,7 @@ shadowJar {
 
     relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
     relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
+    relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option'
     relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
     relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
     relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
@@ -35,6 +36,8 @@ shadowJar {
     exclude 'META-INF/maven/**'
     exclude 'META-INF/proguard/**'
     exclude 'META-INF/versions/**'
+    exclude '**/*.proto'
+    exclude '**/*.proto.bin'
 }
 
 artifacts {
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java
index 899ce58c..9342f3d8 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java
@@ -21,7 +21,6 @@
 package me.lucko.spark.sponge;
 
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
-
 import org.spongepowered.api.Game;
 
 import java.nio.file.Path;
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java
index b0cfd3cd..f19dfcef 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java
@@ -21,10 +21,8 @@
 package me.lucko.spark.sponge;
 
 import me.lucko.spark.common.command.sender.AbstractCommandSender;
-
 import net.kyori.adventure.text.Component;
 import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
-
 import org.spongepowered.api.command.CommandSource;
 import org.spongepowered.api.entity.living.player.Player;
 import org.spongepowered.api.text.serializer.TextSerializers;
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java
index 91d7ea2b..37463b11 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java
@@ -21,7 +21,6 @@
 package me.lucko.spark.sponge;
 
 import me.lucko.spark.common.platform.PlatformInfo;
-
 import org.spongepowered.api.Game;
 import org.spongepowered.api.Platform;
@@ -42,6 +41,11 @@ public String getName() {
         return "Sponge";
     }
 
+    @Override
+    public String getBrand() {
+        return this.game.getPlatform().getContainer(Platform.Component.IMPLEMENTATION).getName();
+    }
+
     @Override
     public String getVersion() {
         return this.game.getPlatform().getContainer(Platform.Component.IMPLEMENTATION).getVersion().orElse("unknown");
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java
index 8f4c15f9..12fff783 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java
@@ -21,9 +21,7 @@
 package me.lucko.spark.sponge;
 
 import com.google.common.collect.ImmutableMap;
-
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
-
 import org.spongepowered.api.Server;
 import org.spongepowered.api.entity.living.player.Player;
 
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
index 0e3f4ebf..7cd73f48 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
@@ -21,7 +21,6 @@
 package me.lucko.spark.sponge;
 
 import com.google.inject.Inject;
-
 import me.lucko.spark.api.Spark;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.SparkPlugin;
@@ -31,7 +30,6 @@
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.tick.TickHook;
-
 import org.slf4j.Logger;
 import org.spongepowered.api.Game;
 import org.spongepowered.api.command.CommandCallable;
@@ -50,14 +48,13 @@
 import org.spongepowered.api.world.Location;
 import org.spongepowered.api.world.World;
 
+import javax.annotation.Nullable;
 import java.nio.file.Path;
 import java.util.List;
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.stream.Stream;
 
-import javax.annotation.Nullable;
-
 @Plugin(
         id = "spark",
         name = "spark",
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java
index 26185222..c2b14835 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java
@@ -22,7 +22,6 @@
 import me.lucko.spark.common.tick.AbstractTickHook;
 import me.lucko.spark.common.tick.TickHook;
-
 import org.spongepowered.api.scheduler.Task;
 
 public class Sponge7TickHook extends AbstractTickHook implements TickHook, Runnable {
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
index df58028c..fd26bfae 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
@@ -22,11 +22,9 @@
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-
 import me.lucko.spark.common.platform.world.AbstractChunkInfo;
 import me.lucko.spark.common.platform.world.CountMap;
 import me.lucko.spark.common.platform.world.WorldInfoProvider;
-
 import org.spongepowered.api.Server;
 import org.spongepowered.api.entity.Entity;
 import org.spongepowered.api.entity.EntityType;
@@ -34,6 +32,7 @@
 import org.spongepowered.api.world.World;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 
@@ -78,6 +77,26 @@ public ChunksResult<Sponge7ChunkInfo> pollChunks() {
         return data;
     }
 
+    @Override
+    public GameRulesResult pollGameRules() {
+        // No way to get defaults info on sponge API 7 :(
+
+        // GameRulesResult data = new GameRulesResult();
+        // for (World world : this.server.getWorlds()) {
+        //     for (Map.Entry<String, String> entry : world.getGameRules().entrySet()) {
+        //         data.put(entry.getKey(), world.getName(), entry.getValue());
+        //     }
+        // }
+        // return data;
+
+        return null;
+    }
+
+    @Override
+    public Collection<DataPackInfo> pollDataPacks() {
+        return null;
+    }
+
     static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType> {
         private final CountMap<EntityType> entityCounts;
 
diff --git a/spark-sponge8/build.gradle b/spark-sponge8/build.gradle
index 9cba862e..a455e414 100644
--- a/spark-sponge8/build.gradle
+++ b/spark-sponge8/build.gradle
@@ -1,5 +1,5 @@
 plugins {
-    id 'com.github.johnrengelman.shadow' version '8.1.1'
+    id 'com.gradleup.shadow' version '8.3.0'
 }
 
 dependencies {
@@ -39,6 +39,8 @@ shadowJar {
     exclude 'module-info.class'
     exclude 'META-INF/maven/**'
     exclude 'META-INF/proguard/**'
+    exclude '**/*.proto'
+    exclude '**/*.proto.bin'
 }
 
 artifacts {
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
index 7f02e755..52218e52 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
@@ -21,9 +21,7 @@
 package me.lucko.spark.sponge;
 
 import com.google.common.collect.ImmutableMap;
-
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
-
 import org.spongepowered.api.Game;
 import org.spongepowered.plugin.PluginCandidate;
 import org.spongepowered.plugin.PluginContainer;
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
index e7878dcf..8bf67bde 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
@@ -21,11 +21,9 @@
 package me.lucko.spark.sponge;
 
 import me.lucko.spark.common.command.sender.AbstractCommandSender;
-
 import net.kyori.adventure.audience.Audience;
 import net.kyori.adventure.identity.Identity;
 import net.kyori.adventure.text.Component;
-
 import org.spongepowered.api.command.CommandCause;
 import org.spongepowered.api.service.permission.Subject;
 import org.spongepowered.api.util.Identifiable;
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java
index 9589ddfb..840a6864 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java
@@ -21,9 +21,9 @@
 package me.lucko.spark.sponge;
 
 import me.lucko.spark.common.platform.PlatformInfo;
-
 import org.spongepowered.api.Game;
 import org.spongepowered.api.Platform;
+import org.spongepowered.plugin.metadata.PluginMetadata;
 
 public class Sponge8PlatformInfo implements PlatformInfo {
     private final Game game;
@@ -42,6 +42,12 @@ public String getName() {
         return "Sponge";
     }
 
+    @Override
+    public String getBrand() {
+        PluginMetadata brandMetadata = this.game.platform().container(Platform.Component.IMPLEMENTATION).metadata();
+        return brandMetadata.name().orElseGet(brandMetadata::id);
+    }
+
     @Override
     public String getVersion() {
         return this.game.platform().container(Platform.Component.IMPLEMENTATION).metadata().version().toString();
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java
index 2bcaf6af..e908d0d8 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java
@@ -21,9 +21,7 @@
 package me.lucko.spark.sponge;
 
 import com.google.common.collect.ImmutableMap;
-
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
-
 import org.spongepowered.api.Server;
 import org.spongepowered.api.entity.living.player.server.ServerPlayer;
 
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
index 5faf843f..c675a8c1 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
@@ -22,7 +22,6 @@
 
 import com.google.common.base.Suppliers;
 import com.google.inject.Inject;
-
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.SparkPlugin;
 import me.lucko.spark.common.command.sender.CommandSender;
@@ -33,9 +32,7 @@
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.sampler.source.SourceMetadata;
 import me.lucko.spark.common.tick.TickHook;
-
 import net.kyori.adventure.text.Component;
-
 import org.apache.logging.log4j.Logger;
 import org.checkerframework.checker.nullness.qual.NonNull;
 import org.spongepowered.api.Game;
@@ -188,7 +185,8 @@ public Collection<SourceMetadata> getKnownSources() {
                 plugin -> plugin.metadata().version().toString(),
                 plugin -> plugin.metadata().contributors().stream()
                         .map(PluginContributor::name)
-                        .collect(Collectors.joining(", "))
+                        .collect(Collectors.joining(", ")),
+                plugin -> plugin.metadata().description().orElse(null)
         );
     }
 
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8TickHook.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8TickHook.java
index 6db51b8b..6502da28 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8TickHook.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8TickHook.java
@@ -22,7 +22,6 @@
 import me.lucko.spark.common.tick.AbstractTickHook;
 import me.lucko.spark.common.tick.TickHook;
-
 import org.spongepowered.api.Game;
 import org.spongepowered.api.scheduler.ScheduledTask;
 import org.spongepowered.api.scheduler.Task;
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
index 69b4515d..1bad9d7d 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
@@ -22,21 +22,24 @@
 
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-
 import me.lucko.spark.common.platform.world.AbstractChunkInfo;
 import me.lucko.spark.common.platform.world.CountMap;
 import me.lucko.spark.common.platform.world.WorldInfoProvider;
-
+import net.kyori.adventure.text.serializer.plain.PlainTextComponentSerializer;
 import org.spongepowered.api.Server;
 import org.spongepowered.api.entity.Entity;
 import org.spongepowered.api.entity.EntityType;
 import org.spongepowered.api.entity.EntityTypes;
 import org.spongepowered.api.world.chunk.WorldChunk;
+import org.spongepowered.api.world.gamerule.GameRule;
+import org.spongepowered.api.world.gamerule.GameRules;
 import org.spongepowered.api.world.server.ServerWorld;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
+import java.util.stream.Collectors;
 
 public class Sponge8WorldInfoProvider implements WorldInfoProvider {
     private final Server server;
@@ -79,6 +82,32 @@ public ChunksResult<Sponge7ChunkInfo> pollChunks() {
         return data;
     }
 
+    @Override
+    public GameRulesResult pollGameRules() {
+        GameRulesResult data = new GameRulesResult();
+
+        List<GameRule<?>> rules = GameRules.registry().stream().collect(Collectors.toList());
+        for (GameRule<?> rule : rules) {
+            data.putDefault(rule.name(), rule.defaultValue().toString());
+            for (ServerWorld world : this.server.worldManager().worlds()) {
+                data.put(rule.name(), world.key().value(), world.properties().gameRule(rule).toString());
+            }
+        }
+
+        return data;
+    }
+
+    @Override
+    public Collection<DataPackInfo> pollDataPacks() {
+        return this.server.packRepository().enabled().stream()
+                .map(pack -> new DataPackInfo(
+                        pack.id(),
+                        PlainTextComponentSerializer.plainText().serialize(pack.description()),
+                        "unknown"
+                ))
+                .collect(Collectors.toList());
+    }
+
     static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType<?>> {
         private final CountMap<EntityType<?>> entityCounts;
 
diff --git a/spark-velocity/build.gradle b/spark-velocity/build.gradle
index ab93a7cf..f0d2a350 100644
--- a/spark-velocity/build.gradle
+++ b/spark-velocity/build.gradle
@@ -1,6 +1,6 @@
 plugins {
     id 'net.kyori.blossom' version '1.3.0'
-    id 'com.github.johnrengelman.shadow' version '8.1.1'
+    id 'com.gradleup.shadow' version '8.3.0'
 }
 
 dependencies {
@@ -37,6 +37,8 @@ shadowJar {
     exclude 'module-info.class'
     exclude 'META-INF/maven/**'
     exclude 'META-INF/proguard/**'
+    exclude '**/*.proto'
+    exclude '**/*.proto.bin'
 }
 
 artifacts {
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java
index 9b697c32..eba00a05 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java
@@ -22,9 +22,7 @@
 
 import com.velocitypowered.api.plugin.PluginContainer;
 import com.velocitypowered.api.plugin.PluginManager;
-
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
-
 import org.checkerframework.checker.nullness.qual.Nullable;
 
 import java.util.HashMap;
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityCommandSender.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityCommandSender.java
index ce372a64..62f7f758 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityCommandSender.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityCommandSender.java
@@ -23,9 +23,7 @@
 import com.velocitypowered.api.command.CommandSource;
 import com.velocitypowered.api.proxy.ConsoleCommandSource;
 import com.velocitypowered.api.proxy.Player;
-
 import me.lucko.spark.common.command.sender.AbstractCommandSender;
-
 import net.kyori.adventure.text.Component;
 
 import java.util.UUID;
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java
index 4ee42cb1..eba1567f 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java
@@ -21,7 +21,6 @@
 package me.lucko.spark.velocity;
 
 import com.velocitypowered.api.proxy.ProxyServer;
-
 import me.lucko.spark.common.platform.PlatformInfo;
 
 public class VelocityPlatformInfo implements PlatformInfo {
@@ -41,6 +40,11 @@ public String getName() {
         return "Velocity";
     }
 
+    @Override
+    public String getBrand() {
+        return this.proxy.getVersion().getName();
+    }
+
     @Override
     public String getVersion() {
         return this.proxy.getVersion().getVersion();
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java
index 382ea220..6cd4445e 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java
@@ -23,7 +23,6 @@
 import com.google.common.collect.ImmutableMap;
 import com.velocitypowered.api.proxy.Player;
 import com.velocitypowered.api.proxy.ProxyServer;
-
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 
 import java.util.Map;
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
index 4a89a4e8..f31963de 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
@@ -29,14 +29,12 @@
 import com.velocitypowered.api.plugin.Plugin;
 import com.velocitypowered.api.plugin.annotation.DataDirectory;
 import com.velocitypowered.api.proxy.ProxyServer;
-
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.SparkPlugin;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.sampler.source.SourceMetadata;
-
 import org.slf4j.Logger;
 
 import java.nio.file.Path;
@@ -89,6 +87,11 @@ public List<String> suggest(Invocation inv) {
         return this.platform.tabCompleteCommand(new VelocityCommandSender(inv.source()), inv.arguments());
     }
 
+    @Override
+    public boolean hasPermission(Invocation inv) {
+        return this.platform.hasPermissionForAnyCommand(new VelocityCommandSender(inv.source()));
+    }
+
     @Override
     public String getVersion() {
         return VelocitySparkPlugin.class.getAnnotation(Plugin.class).version();
@@ -141,7 +144,8 @@ public Collection<SourceMetadata> getKnownSources() {
                 this.proxy.getPluginManager().getPlugins(),
                 plugin -> plugin.getDescription().getId(),
                 plugin -> plugin.getDescription().getVersion().orElse("unspecified"),
-                plugin -> String.join(", ", plugin.getDescription().getAuthors())
+                plugin -> String.join(", ", plugin.getDescription().getAuthors()),
+                plugin -> plugin.getDescription().getDescription().orElse(null)
         );
     }
 
diff --git a/spark-velocity4/build.gradle b/spark-velocity4/build.gradle
index f242c1ef..a10d73b1 100644
--- a/spark-velocity4/build.gradle
+++ b/spark-velocity4/build.gradle
@@ -1,6 +1,6 @@
 plugins {
     id 'net.kyori.blossom' version '1.3.0'
-    id 'com.github.johnrengelman.shadow' version '8.1.1'
+    id 'com.gradleup.shadow' version '8.3.0'
 }
 
 tasks.withType(JavaCompile) {
@@ -42,6 +42,8 @@ shadowJar {
     exclude 'module-info.class'
     exclude 'META-INF/maven/**'
     exclude 'META-INF/proguard/**'
+    exclude '**/*.proto'
+    exclude '**/*.proto.bin'
 }
 
 artifacts {
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java
index 84840d2e..c01e8a15 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java
@@ -22,9 +22,7 @@
 
 import com.velocitypowered.api.plugin.PluginContainer;
 import com.velocitypowered.api.plugin.PluginManager;
-
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
-
 import org.checkerframework.checker.nullness.qual.Nullable;
 
 import java.util.HashMap;
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4CommandSender.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4CommandSender.java
index d346c2ce..11549eeb 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4CommandSender.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4CommandSender.java
@@ -23,9 +23,7 @@
 import com.velocitypowered.api.command.CommandSource;
 import com.velocitypowered.api.command.ConsoleCommandSource;
 import com.velocitypowered.api.proxy.connection.Player;
-
 import me.lucko.spark.common.command.sender.AbstractCommandSender;
-
 import net.kyori.adventure.text.Component;
 
 import java.util.UUID;
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java
index bb2f26b6..86f5b951 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java
@@ -21,7 +21,6 @@
 package me.lucko.spark.velocity;
 
 import com.velocitypowered.api.proxy.ProxyServer;
-
 import me.lucko.spark.common.platform.PlatformInfo;
 
 public class Velocity4PlatformInfo implements PlatformInfo {
@@ -41,6 +40,11 @@ public String getName() {
         return "Velocity";
     }
 
+    @Override
+    public String getBrand() {
+        return this.proxy.version().name();
+    }
+
     @Override
     public String getVersion() {
         return this.proxy.version().version();
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java
index 18f36a7c..32114e51 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java
@@ -23,7 +23,6 @@
 import com.google.common.collect.ImmutableMap;
 import com.velocitypowered.api.proxy.ProxyServer;
 import com.velocitypowered.api.proxy.connection.Player;
-
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 
 import java.util.Map;
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
index b6382468..015deb46 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
@@ -29,14 +29,12 @@
 import com.velocitypowered.api.plugin.Plugin;
 import com.velocitypowered.api.plugin.annotation.DataDirectory;
 import com.velocitypowered.api.proxy.ProxyServer;
-
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.SparkPlugin;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.sampler.source.SourceMetadata;
-
 import org.slf4j.Logger;
 
 import java.nio.file.Path;
@@ -89,6 +87,11 @@ public List<String> suggest(Invocation inv) {
         return this.platform.tabCompleteCommand(new Velocity4CommandSender(inv.source()), inv.arguments());
     }
 
+    @Override
+    public boolean hasPermission(Invocation inv) {
+        return this.platform.hasPermissionForAnyCommand(new Velocity4CommandSender(inv.source()));
+    }
+
     @Override
     public String getVersion() {
         return Velocity4SparkPlugin.class.getAnnotation(Plugin.class).version();
@@ -141,7 +144,8 @@ public Collection<SourceMetadata> getKnownSources() {
                 this.proxy.pluginManager().plugins(),
                 plugin -> plugin.description().id(),
                 plugin -> plugin.description().version(),
-                plugin -> String.join(", ", plugin.description().authors())
+                plugin -> String.join(", ", plugin.description().authors()),
+                plugin -> plugin.description().description()
         );
     }
 
diff --git a/spark-waterdog/build.gradle b/spark-waterdog/build.gradle
index f90a956b..0f0dee21 100644
--- a/spark-waterdog/build.gradle
+++ b/spark-waterdog/build.gradle
@@ -1,5 +1,5 @@
 plugins {
-    id 'com.github.johnrengelman.shadow' version '8.1.1'
+    id 'com.gradleup.shadow' version '8.3.0'
 }
 
 tasks.withType(JavaCompile) {
@@ -9,8 +9,9 @@ tasks.withType(JavaCompile) {
 
 dependencies {
     implementation project(':spark-common')
-    implementation 'net.kyori:adventure-text-serializer-legacy:4.13.1'
-    compileOnly 'dev.waterdog.waterdogpe:waterdog:1.2.3'
+    compileOnly('dev.waterdog.waterdogpe:waterdog:1.2.3') {
+        exclude group: 'com.nukkitx.protocol'
+    }
 }
 
 repositories {
@@ -34,6 +35,7 @@ shadowJar {
 
     relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
     relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
+    relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option'
     relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
     relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
     relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
@@ -44,6 +46,8 @@ shadowJar {
     exclude 'module-info.class'
     exclude 'META-INF/maven/**'
     exclude 'META-INF/proguard/**'
+    exclude '**/*.proto'
+    exclude '**/*.proto.bin'
 }
 
 artifacts {
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java
index 2207c9ee..8a34983b 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java
@@ -20,11 +20,10 @@
 
 package me.lucko.spark.waterdog;
 
-import me.lucko.spark.common.sampler.source.ClassSourceLookup;
-
 import dev.waterdog.waterdogpe.ProxyServer;
 import dev.waterdog.waterdogpe.plugin.Plugin;
 import dev.waterdog.waterdogpe.plugin.PluginClassLoader;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 
 import java.util.Map;
 import java.util.WeakHashMap;
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogCommandSender.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogCommandSender.java
index 8d7223a3..259e8328 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogCommandSender.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogCommandSender.java
@@ -20,14 +20,12 @@
 
 package me.lucko.spark.waterdog;
 
+import dev.waterdog.waterdogpe.command.CommandSender;
+import dev.waterdog.waterdogpe.player.ProxiedPlayer;
 import me.lucko.spark.common.command.sender.AbstractCommandSender;
-
 import net.kyori.adventure.text.Component;
 import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
 
-import dev.waterdog.waterdogpe.command.CommandSender;
-import dev.waterdog.waterdogpe.player.ProxiedPlayer;
-
 import java.util.UUID;
 
 public class WaterdogCommandSender extends AbstractCommandSender<CommandSender> {
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java
index 14b8f607..b32d427b 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java
@@ -20,9 +20,8 @@
 
 package me.lucko.spark.waterdog;
 
-import me.lucko.spark.common.platform.PlatformInfo;
-
 import dev.waterdog.waterdogpe.WaterdogPE;
+import me.lucko.spark.common.platform.PlatformInfo;
 
 public class WaterdogPlatformInfo implements PlatformInfo {
 
@@ -36,6 +35,11 @@ public String getName() {
         return "Waterdog";
     }
 
+    @Override
+    public String getBrand() {
+        return "Waterdog";
+    }
+
     @Override
     public String getVersion() {
         return WaterdogPE.version().baseVersion();
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java
index b22325c9..03c9242a 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java
@@ -21,11 +21,9 @@
 package me.lucko.spark.waterdog;
 
 import com.google.common.collect.ImmutableMap;
-
-import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
-
 import dev.waterdog.waterdogpe.ProxyServer;
 import dev.waterdog.waterdogpe.player.ProxiedPlayer;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 
 import java.util.Map;
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
index 23aa6a68..f616176a 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
@@ -20,6 +20,10 @@
 
 package me.lucko.spark.waterdog;
 
+import dev.waterdog.waterdogpe.ProxyServer;
+import dev.waterdog.waterdogpe.command.Command;
+import dev.waterdog.waterdogpe.command.CommandSender;
+import dev.waterdog.waterdogpe.plugin.Plugin;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.SparkPlugin;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
@@ -27,11 +31,6 @@
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.sampler.source.SourceMetadata;
 
-import dev.waterdog.waterdogpe.ProxyServer;
-import dev.waterdog.waterdogpe.command.Command;
-import dev.waterdog.waterdogpe.command.CommandSender;
-import dev.waterdog.waterdogpe.plugin.Plugin;
-
 import java.nio.file.Path;
 import java.util.Collection;
 import java.util.logging.Level;
@@ -108,7 +107,8 @@ public Collection<SourceMetadata> getKnownSources() {
                 getProxy().getPluginManager().getPlugins(),
                 Plugin::getName,
                 plugin -> plugin.getDescription().getVersion(),
-                plugin -> plugin.getDescription().getAuthor()
+                plugin -> plugin.getDescription().getAuthor(),
+                plugin -> null
         );
    }
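A pattern worth noting across the platform diffs above: every `getKnownSources()` now passes a fifth accessor to `SourceMetadata.gather`, supplying the plugin description, with `null` where the platform exposes none (as Waterdog does here). A sketch of the call shape against a hypothetical plugin type; `PluginHandle` and `knownSources` are illustrative names, not part of the patch:

    import me.lucko.spark.common.sampler.source.SourceMetadata;

    import java.util.Collection;
    import java.util.List;

    // Hypothetical platform plugin type; only the fifth (description) accessor is new.
    record PluginHandle(String name, String version, List<String> authors, String description) {}

    static Collection<SourceMetadata> knownSources(Collection<PluginHandle> plugins) {
        return SourceMetadata.gather(
                plugins,
                PluginHandle::name,
                PluginHandle::version,
                p -> String.join(", ", p.authors()),
                p -> p.description() // may be null when the platform has no description
        );
    }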