Skip to content

Commit

Permalink
Add some unit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
lucko committed Jul 29, 2024
1 parent 4c0149b commit 60d54cc
Show file tree
Hide file tree
Showing 45 changed files with 1,905 additions and 32 deletions.
12 changes: 9 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,12 @@ env:

jobs:
build-gradle:
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]

runs-on: ${{ matrix.os }}

steps:
- name: Checkout repository
uses: actions/checkout@v3
Expand All @@ -34,11 +39,12 @@ jobs:
- name: Setup Gradle
uses: gradle/gradle-build-action@v2

- name: Run build with Gradle wrapper
run: ./gradlew build
- name: Run build and tests with Gradle wrapper
run: ./gradlew test build

- name: Upload all artifacts
uses: actions/upload-artifact@v3
if: matrix.os == 'ubuntu-latest'
with:
name: jars
path: |
Expand Down
13 changes: 13 additions & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@ plugins {
id 'org.cadixdev.licenser' version '0.6.1' apply false
}

import org.gradle.api.tasks.testing.logging.TestExceptionFormat
import org.gradle.api.tasks.testing.logging.TestLogEvent

allprojects {
group = 'me.lucko'
version = '1.10-SNAPSHOT'
Expand All @@ -25,6 +28,16 @@ subprojects {
options.release = 8
}

// Configure logging for every Test task in all subprojects: report each
// passed/failed/skipped test and print full exception details, so CI logs
// are useful without having to re-run the build locally.
tasks.withType(Test).configureEach {
    testLogging {
        events = [TestLogEvent.PASSED, TestLogEvent.FAILED, TestLogEvent.SKIPPED]
        exceptionFormat = TestExceptionFormat.FULL
        showExceptions = true
        showCauses = true
        showStackTraces = true
    }
}

processResources {
duplicatesStrategy = DuplicatesStrategy.INCLUDE
}
Expand Down
27 changes: 24 additions & 3 deletions spark-common/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -20,17 +20,17 @@ dependencies {
exclude(module: 'slf4j-api')
}

api('net.kyori:adventure-api:4.13.1') {
api('net.kyori:adventure-api:4.17.0') {
exclude(module: 'adventure-bom')
exclude(module: 'checker-qual')
exclude(module: 'annotations')
}
api('net.kyori:adventure-text-serializer-gson:4.13.1') {
api('net.kyori:adventure-text-serializer-gson:4.17.0') {
exclude(module: 'adventure-bom')
exclude(module: 'adventure-api')
exclude(module: 'gson')
}
api('net.kyori:adventure-text-serializer-legacy:4.13.1') {
api('net.kyori:adventure-text-serializer-legacy:4.17.0') {
exclude(module: 'adventure-bom')
exclude(module: 'adventure-api')
}
Expand All @@ -40,6 +40,22 @@ dependencies {
compileOnly 'com.google.code.gson:gson:2.7'
compileOnly 'com.google.guava:guava:19.0'
compileOnly 'org.checkerframework:checker-qual:3.44.0'

testImplementation 'org.junit.jupiter:junit-jupiter-api:5.11.0-M2'
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.11.0-M2'
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.11.0-M2'
// testImplementation "org.testcontainers:junit-jupiter:1.19.8"
// testImplementation 'org.mockito:mockito-core:5.12.0'
// testImplementation 'org.mockito:mockito-junit-jupiter:5.12.0'

testImplementation 'com.google.code.gson:gson:2.7'
testImplementation 'com.google.guava:guava:19.0'
testImplementation 'org.checkerframework:checker-qual:3.44.0'

testImplementation('net.kyori:adventure-text-serializer-ansi:4.17.0') {
exclude(module: 'adventure-bom')
exclude(module: 'adventure-api')
}
}

protobuf {
Expand All @@ -56,3 +72,8 @@ protobuf {
}
}
}

// Run tests on the JUnit 5 (Jupiter) platform.
// NOTE(review): the colorLevel property presumably forces the adventure ANSI
// serializer to a fixed 16-colour palette so test output is deterministic
// across terminals/CI — confirm against adventure-text-serializer-ansi docs.
test {
    useJUnitPlatform {}
    systemProperty('net.kyori.ansi.colorLevel', 'indexed16')
}
Original file line number Diff line number Diff line change
Expand Up @@ -52,10 +52,12 @@
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
import me.lucko.spark.common.util.Configuration;
import me.lucko.spark.common.util.SparkStaticLogger;
import me.lucko.spark.common.util.TemporaryFiles;
import me.lucko.spark.common.util.classfinder.ClassFinder;
import me.lucko.spark.common.util.config.Configuration;
import me.lucko.spark.common.util.config.FileConfiguration;
import me.lucko.spark.common.util.config.RuntimeConfiguration;
import me.lucko.spark.common.ws.TrustedKeyStore;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.event.ClickEvent;
Expand All @@ -71,6 +73,7 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
Expand Down Expand Up @@ -121,7 +124,11 @@ public SparkPlatform(SparkPlugin plugin) {
SparkStaticLogger.setLogger(plugin::log);

this.temporaryFiles = new TemporaryFiles(this.plugin.getPluginDirectory().resolve("tmp"));
this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json"));
this.configuration = Configuration.combining(
RuntimeConfiguration.SYSTEM_PROPERTIES,
RuntimeConfiguration.ENVIRONMENT_VARIABLES,
new FileConfiguration(this.plugin.getPluginDirectory().resolve("config.json"))
);

this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/");
String bytebinUrl = this.configuration.getString("bytebinUrl", "https://spark-usercontent.lucko.me/");
Expand Down Expand Up @@ -330,7 +337,8 @@ public boolean hasPermissionForAnyCommand(CommandSender sender) {
return !getAvailableCommands(sender).isEmpty();
}

public void executeCommand(CommandSender sender, String[] args) {
public CompletableFuture<Void> executeCommand(CommandSender sender, String[] args) {
CompletableFuture<Void> future = new CompletableFuture<>();
AtomicReference<Thread> executorThread = new AtomicReference<>();
AtomicReference<Thread> timeoutThread = new AtomicReference<>();
AtomicBoolean completed = new AtomicBoolean(false);
Expand All @@ -341,9 +349,11 @@ public void executeCommand(CommandSender sender, String[] args) {
this.commandExecuteLock.lock();
try {
executeCommand0(sender, args);
future.complete(null);
} catch (Exception e) {
this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command");
e.printStackTrace();
future.completeExceptionally(e);
} finally {
this.commandExecuteLock.unlock();
executorThread.set(null);
Expand Down Expand Up @@ -393,6 +403,8 @@ public void executeCommand(CommandSender sender, String[] args) {
timeoutThread.set(null);
}
});

return future;
}

private void executeCommand0(CommandSender sender, String[] args) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Supplier;

import static net.kyori.adventure.text.Component.empty;
import static net.kyori.adventure.text.Component.space;
Expand Down Expand Up @@ -208,7 +209,7 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command
}
}

ThreadGrouper threadGrouper;
Supplier<ThreadGrouper> threadGrouper;
if (arguments.boolFlag("combine-all")) {
threadGrouper = ThreadGrouper.AS_ONE;
} else if (arguments.boolFlag("not-combined")) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

package me.lucko.spark.common.heapdump;

import com.google.common.annotations.VisibleForTesting;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.proto.SparkHeapProtos.HeapData;
Expand Down Expand Up @@ -123,6 +124,11 @@ private HeapDumpSummary(List<Entry> entries) {
this.entries = entries;
}

// Package-private accessor exposed only so unit tests can inspect the
// parsed heap-dump entries; not part of the public API.
@VisibleForTesting
List<Entry> getEntries() {
    return this.entries;
}

public HeapData toProto(SparkPlatform platform, CommandSender.Data creator) {
HeapMetadata.Builder metadata = HeapMetadata.newBuilder()
.setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
Expand Down Expand Up @@ -186,6 +192,16 @@ public HeapEntry toProto() {
.setType(this.type)
.build();
}

/**
 * Returns a human-readable debug representation of this entry,
 * e.g. {@code Entry{order=1, instances=2, bytes=3, type='java.lang.String'}}.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("Entry{");
    sb.append("order=").append(this.order);
    sb.append(", instances=").append(this.instances);
    sb.append(", bytes=").append(this.bytes);
    sb.append(", type='").append(this.type).append('\'');
    sb.append('}');
    return sb.toString();
}
}

public interface DiagnosticCommandMXBean {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <[email protected]>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.common.monitor;

import org.checkerframework.checker.nullness.qual.NonNull;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;

/**
* Utility for reading from sysctl on macOS systems.
*/
public enum MacosSysctl {

    /** Invokes {@code sysctl -a} to dump all kernel state key/value pairs. */
    SYSCTL("sysctl", "-a");

    // os.name is e.g. "Mac OS X"; normalise before comparing so we only ever
    // attempt to spawn the process when actually running on macOS.
    private static final boolean SUPPORTED = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "").equals("macosx");

    /** The command + arguments to execute. */
    private final String[] cmdArgs;

    MacosSysctl(String... cmdArgs) {
        this.cmdArgs = cmdArgs;
    }

    /**
     * Executes the sysctl command and collects its output.
     *
     * @return the lines printed by the command (stderr merged into stdout), or
     *         an empty list if not running on macOS or if execution fails
     */
    public @NonNull List<String> read() {
        if (!SUPPORTED) {
            return Collections.emptyList();
        }

        Process process = null;
        try {
            process = new ProcessBuilder(this.cmdArgs).redirectErrorStream(true).start();
            try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
                List<String> lines = new ArrayList<>();

                String line;
                while ((line = buf.readLine()) != null) {
                    lines.add(line);
                }

                return lines;
            }
        } catch (Exception e) {
            // best effort: any failure is treated as "no data available"
        } finally {
            if (process != null) {
                // ensure the subprocess is cleaned up (and not leaked) even if
                // reading failed part-way through; harmless if already exited
                process.destroy();
            }
        }

        return Collections.emptyList();
    }
}

Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
package me.lucko.spark.common.monitor.cpu;

import me.lucko.spark.common.monitor.LinuxProc;
import me.lucko.spark.common.monitor.MacosSysctl;
import me.lucko.spark.common.monitor.WindowsWmic;

import java.util.regex.Pattern;
Expand Down Expand Up @@ -52,6 +53,12 @@ public static String queryCpuModel() {
}
}

for (String line : MacosSysctl.SYSCTL.read()) {
if (line.startsWith("machdep.cpu.brand_string:")) {
return line.substring("machdep.cpu.brand_string:".length()).trim();
}
}

return "";
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

package me.lucko.spark.common.monitor.net;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import me.lucko.spark.common.monitor.LinuxProc;
import org.checkerframework.checker.nullness.qual.NonNull;
Expand Down Expand Up @@ -200,7 +201,8 @@ public NetworkInterfaceInfo subtract(NetworkInterfaceInfo other) {

private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s*(\\w+):([\\d\\s]+)$");

private static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
@VisibleForTesting
static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
// Inter-| Receive | Transmit
// face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
// lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,9 @@

import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.Configuration;
import me.lucko.spark.common.util.config.Configuration;

import java.util.function.Supplier;
import java.util.logging.Level;

public class BackgroundSamplerManager {
Expand Down Expand Up @@ -103,7 +104,7 @@ public boolean restartBackgroundSampler() {
private void startSampler() {
boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java");

ThreadGrouper threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool"));
Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool"));
ThreadDumper threadDumper = ThreadDumper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_DUMPER, "default"));
if (threadDumper == null) {
threadDumper = this.platform.getPlugin().getDefaultThreadDumper();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import me.lucko.spark.common.tick.TickHook;

import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

/**
* Builds {@link Sampler} instances.
Expand All @@ -44,7 +45,7 @@ public class SamplerBuilder {
private long autoEndTime = -1;
private boolean background = false;
private ThreadDumper threadDumper = ThreadDumper.ALL;
private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME;
private Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.BY_NAME;

private int ticksOver = -1;
private TickHook tickHook = null;
Expand Down Expand Up @@ -80,7 +81,7 @@ public SamplerBuilder threadDumper(ThreadDumper threadDumper) {
return this;
}

public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) {
public SamplerBuilder threadGrouper(Supplier<ThreadGrouper> threadGrouper) {
this.threadGrouper = threadGrouper;
return this;
}
Expand Down Expand Up @@ -131,7 +132,7 @@ public Sampler start(SparkPlatform platform) throws UnsupportedOperationExceptio
this.samplingInterval
);

SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background);
SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper.get(), this.autoEndTime, this.background);

Sampler sampler;
if (this.mode == SamplerMode.ALLOCATION) {
Expand Down
Loading

0 comments on commit 60d54cc

Please sign in to comment.