Skip to content

Commit

Permalink
Merge branch 'lucko:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
smartcmd authored Oct 15, 2024
2 parents a19a301 + 6effd0d commit a4c62de
Show file tree
Hide file tree
Showing 73 changed files with 1,663 additions and 719 deletions.
6 changes: 5 additions & 1 deletion spark-bukkit/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,14 @@ plugins {
id 'com.gradleup.shadow' version '8.3.0'
}

// Stop Gradle from forcing dependency JVM targets to match this module's toolchain.
// NOTE(review): presumably required to compile against the modern paper-api
// (which targets a newer JVM) while this module targets an older one — confirm.
java {
    disableAutoTargetJvm()
}

dependencies {
implementation project(':spark-common')
implementation 'net.kyori:adventure-platform-bukkit:4.3.3'
compileOnly 'com.destroystokyo.paper:paper-api:1.16.4-R0.1-SNAPSHOT'
compileOnly 'io.papermc.paper:paper-api:1.21.1-R0.1-SNAPSHOT'

// placeholders
compileOnly 'me.clip:placeholderapi:2.10.3'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,8 @@ public Collection<SourceMetadata> getKnownSources() {
Arrays.asList(getServer().getPluginManager().getPlugins()),
Plugin::getName,
plugin -> plugin.getDescription().getVersion(),
plugin -> String.join(", ", plugin.getDescription().getAuthors())
plugin -> String.join(", ", plugin.getDescription().getAuthors()),
plugin -> plugin.getDescription().getDescription()
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,12 +32,16 @@
import org.bukkit.entity.EntityType;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.stream.Collectors;

public class BukkitWorldInfoProvider implements WorldInfoProvider {
private static final boolean SUPPORTS_PAPER_COUNT_METHODS;
private static final boolean SUPPORTS_GAMERULES;
private static final boolean SUPPORTS_DATAPACKS;

static {
boolean supportsPaperCountMethods = false;
Expand All @@ -59,8 +63,17 @@ public class BukkitWorldInfoProvider implements WorldInfoProvider {
// ignored
}

boolean supportsDataPacks = false;
try {
Server.class.getMethod("getDataPackManager");
supportsDataPacks = true;
} catch (Exception e) {
// ignored
}

SUPPORTS_PAPER_COUNT_METHODS = supportsPaperCountMethods;
SUPPORTS_GAMERULES = supportsGameRules;
SUPPORTS_DATAPACKS = supportsDataPacks;
}

private final Server server;
Expand Down Expand Up @@ -155,6 +168,22 @@ public GameRulesResult pollGameRules() {
return data;
}

    // suppress the deprecated-for-removal warning emitted for the data pack API on newer Bukkit versions
    @SuppressWarnings("removal")
    @Override
    public Collection<DataPackInfo> pollDataPacks() {
        // Server#getDataPackManager does not exist on older servers (detected via
        // reflection in the static initializer); return null to signal "unsupported"
        if (!SUPPORTS_DATAPACKS) {
            return null;
        }

        return this.server.getDataPackManager().getDataPacks().stream()
                .map(pack -> new DataPackInfo(
                        pack.getTitle(),
                        pack.getDescription(),
                        // normalise the source enum name, e.g. "WORLD_SOURCE" -> "worldsource"
                        pack.getSource().name().toLowerCase(Locale.ROOT).replace("_", "")
                ))
                .collect(Collectors.toList());
    }

static final class BukkitChunkInfo extends AbstractChunkInfo<EntityType> {
private final CountMap<EntityType> entityCounts;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,8 @@ public Collection<SourceMetadata> getKnownSources() {
getProxy().getPluginManager().getPlugins(),
plugin -> plugin.getDescription().getName(),
plugin -> plugin.getDescription().getVersion(),
plugin -> plugin.getDescription().getAuthor()
plugin -> plugin.getDescription().getAuthor(),
plugin -> plugin.getDescription().getDescription()
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,12 +74,14 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static net.kyori.adventure.text.Component.space;
import static net.kyori.adventure.text.Component.text;
Expand Down Expand Up @@ -341,6 +343,15 @@ private List<Command> getAvailableCommands(CommandSender sender) {
.collect(Collectors.toList());
}

public Set<String> getAllSparkPermissions() {
return Stream.concat(
Stream.of("spark"),
this.commands.stream()
.map(Command::primaryAlias)
.map(alias -> "spark." + alias)
).collect(Collectors.toSet());
}

    /**
     * Checks if the given sender is able to execute at least one spark command.
     *
     * @param sender the command sender to check
     * @return true if any command is available to the sender
     */
    public boolean hasPermissionForAnyCommand(CommandSender sender) {
        return !getAvailableCommands(sender).isEmpty();
    }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,11 @@
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.async.AsyncSampler;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.java.MergeStrategy;
import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.FormatUtil;
import me.lucko.spark.common.util.MediaTypes;
import me.lucko.spark.common.util.MethodDisambiguator;
import me.lucko.spark.common.ws.ViewerSocket;
import me.lucko.spark.proto.SparkSamplerProtos;
import net.kyori.adventure.text.Component;
Expand Down Expand Up @@ -507,12 +506,7 @@ private Sampler.ExportProps getExportProps(SparkPlatform platform, CommandRespon
return new Sampler.ExportProps()
.creator(resp.senderData())
.comment(Iterables.getFirst(arguments.stringFlag("comment"), null))
.mergeMode(() -> {
MethodDisambiguator methodDisambiguator = new MethodDisambiguator(platform.createClassFinder());
return arguments.boolFlag("separate-parent-calls")
? MergeMode.separateParentCalls(methodDisambiguator)
: MergeMode.sameMethod(methodDisambiguator);
})
.mergeStrategy(arguments.boolFlag("separate-parent-calls") ? MergeStrategy.SEPARATE_PARENT_CALLS : MergeStrategy.SAME_METHOD)
.classSourceLookup(() -> ClassSourceLookup.create(platform));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -146,34 +146,32 @@ public SystemStatistics getSystemStatistics() {
return builder.build();
}

public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics, boolean detailed) {
public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics, boolean includeWorldStatistics) {
PlatformStatistics.Builder builder = PlatformStatistics.newBuilder();

PlatformStatistics.Memory.Builder memory = PlatformStatistics.Memory.newBuilder()
.setHeap(memoryUsageProto(ManagementFactory.getMemoryMXBean().getHeapMemoryUsage()))
.setNonHeap(memoryUsageProto(ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage()));

if (detailed) {
List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans();
for (MemoryPoolMXBean memoryPool : memoryPoolMXBeans) {
if (memoryPool.getType() != MemoryType.HEAP) {
continue;
}
List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans();
for (MemoryPoolMXBean memoryPool : memoryPoolMXBeans) {
if (memoryPool.getType() != MemoryType.HEAP) {
continue;
}

MemoryUsage usage = memoryPool.getUsage();
MemoryUsage collectionUsage = memoryPool.getCollectionUsage();
MemoryUsage usage = memoryPool.getUsage();
MemoryUsage collectionUsage = memoryPool.getCollectionUsage();

if (usage.getMax() == -1) {
usage = new MemoryUsage(usage.getInit(), usage.getUsed(), usage.getCommitted(), usage.getCommitted());
}

memory.addPools(PlatformStatistics.Memory.MemoryPool.newBuilder()
.setName(memoryPool.getName())
.setUsage(memoryUsageProto(usage))
.setCollectionUsage(memoryUsageProto(collectionUsage))
.build()
);
if (usage.getMax() == -1) {
usage = new MemoryUsage(usage.getInit(), usage.getUsed(), usage.getCommitted(), usage.getCommitted());
}

memory.addPools(PlatformStatistics.Memory.MemoryPool.newBuilder()
.setName(memoryPool.getName())
.setUsage(memoryUsageProto(usage))
.setCollectionUsage(memoryUsageProto(collectionUsage))
.build()
);
}

builder.setMemory(memory.build());
Expand Down Expand Up @@ -240,7 +238,7 @@ public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStat
: PlatformStatistics.OnlineMode.OFFLINE
);

if (detailed) {
if (includeWorldStatistics) {
try {
WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(
new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;

import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
Expand Down Expand Up @@ -84,6 +85,10 @@ public CompletableFuture<WorldInfoProvider.GameRulesResult> pollGameRules() {
return async(WorldInfoProvider::pollGameRules);
}

    /**
     * Polls the underlying {@link WorldInfoProvider} for data pack information
     * asynchronously.
     *
     * @return a future completing with the polled data packs
     */
    public CompletableFuture<Collection<WorldInfoProvider.DataPackInfo>> pollDataPacks() {
        return async(WorldInfoProvider::pollDataPacks);
    }

    /**
     * Polls for counts and resolves the resulting future via {@code get(...)}.
     *
     * @return the counts result
     */
    public WorldInfoProvider.CountsResult getCounts() {
        return get(pollCounts());
    }
Expand All @@ -95,4 +100,8 @@ public WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> getChunks() {
    /**
     * Polls for game rules and resolves the resulting future via {@code get(...)}.
     *
     * @return the game rules result
     */
    public WorldInfoProvider.GameRulesResult getGameRules() {
        return get(pollGameRules());
    }

    /**
     * Polls for data packs and resolves the resulting future via {@code get(...)}.
     *
     * @return the data pack information
     */
    public Collection<WorldInfoProvider.DataPackInfo> getDataPacks() {
        return get(pollDataPacks());
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

package me.lucko.spark.common.platform.world;

import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
Expand All @@ -44,6 +45,11 @@ public ChunksResult<? extends ChunkInfo<?>> pollChunks() {
public GameRulesResult pollGameRules() {
return null;
}

@Override
public Collection<DataPackInfo> pollDataPacks() {
return null;
}
};

/**
Expand All @@ -67,6 +73,13 @@ public GameRulesResult pollGameRules() {
*/
GameRulesResult pollGameRules();

/**
* Polls for data packs.
*
* @return the data packs
*/
Collection<DataPackInfo> pollDataPacks();

default boolean mustCallSync() {
return true;
}
Expand Down Expand Up @@ -146,4 +159,28 @@ public Map<String, String> getWorldValues() {
}
}

/**
 * Immutable holder describing a single data pack installed on the server.
 */
final class DataPackInfo {
    private final String packName;
    private final String packDescription;
    private final String packSource;

    public DataPackInfo(String name, String description, String source) {
        this.packName = name;
        this.packDescription = description;
        this.packSource = source;
    }

    /** Gets the name of the data pack. */
    public String name() {
        return this.packName;
    }

    /** Gets the description of the data pack. */
    public String description() {
        return this.packDescription;
    }

    /** Gets the source of the data pack. */
    public String source() {
        return this.packSource;
    }
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import me.lucko.spark.proto.SparkProtos.WorldStatistics;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
Expand Down Expand Up @@ -80,6 +81,16 @@ public WorldStatistics getWorldStatistics() {
));
}

Collection<WorldInfoProvider.DataPackInfo> dataPacks = this.provider.getDataPacks();
if (dataPacks != null) {
dataPacks.forEach(dataPack -> stats.addDataPacks(WorldStatistics.DataPack.newBuilder()
.setName(dataPack.name())
.setDescription(dataPack.description())
.setSource(dataPack.source())
.build()
));
}

return stats.build();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,11 @@
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.SparkMetadata;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.sampler.node.exporter.NodeExporter;
import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;
import me.lucko.spark.common.util.classfinder.ClassFinder;
Expand All @@ -42,10 +39,10 @@
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Function;
import java.util.function.Supplier;

/**
Expand Down Expand Up @@ -182,6 +179,7 @@ protected void sendStatisticsToSocket() {

protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender.Data creator, String comment, DataAggregator dataAggregator) {
SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
.setSamplerEngine(getType().asProto())
.setSamplerMode(getMode().asProto())
.setStartTime(this.startTime)
.setInterval(this.interval)
Expand All @@ -202,7 +200,7 @@ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform pla
proto.setMetadata(metadata);
}

protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup, Supplier<ClassFinder> classFinderSupplier) {
protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Function<ProtoTimeEncoder, NodeExporter> nodeExporterFunction, ClassSourceLookup classSourceLookup, Supplier<ClassFinder> classFinderSupplier) {
List<ThreadNode> data = dataAggregator.exportData();
data.sort(Comparator.comparing(ThreadNode::getThreadLabel));

Expand All @@ -217,8 +215,10 @@ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAg
this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows);
proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export());

NodeExporter exporter = nodeExporterFunction.apply(timeEncoder);

for (ThreadNode entry : data) {
proto.addThreads(entry.toProto(mergeMode, timeEncoder));
proto.addThreads(exporter.export(entry));
classSourceVisitor.visit(entry);
}

Expand Down
Loading

0 comments on commit a4c62de

Please sign in to comment.