Some Netty optimizations

commit 7ba831d1a6
parent b4b40d36e0
Author: Nanit
Date:   2020-11-29 15:00:17 +02:00

10 changed files with 905 additions and 849 deletions

LimboServer.java

@@ -1,8 +1,15 @@
 package ru.nanit.limbo.server;
 
 import io.netty.bootstrap.ServerBootstrap;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.ServerChannel;
+import io.netty.channel.epoll.Epoll;
+import io.netty.channel.epoll.EpollEventLoopGroup;
+import io.netty.channel.epoll.EpollServerSocketChannel;
 import io.netty.channel.nio.NioEventLoopGroup;
 import io.netty.channel.socket.nio.NioServerSocketChannel;
+import io.netty.util.ResourceLeakDetector;
 import napi.configurate.serializing.NodeSerializers;
 import ru.nanit.limbo.configuration.LimboConfig;
 import ru.nanit.limbo.configuration.SocketAddressSerializer;
@@ -14,8 +21,7 @@ import ru.nanit.limbo.world.DimensionRegistry;
 
 import java.net.SocketAddress;
 import java.nio.file.Paths;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
 
 public final class LimboServer {
@@ -23,6 +29,10 @@ public final class LimboServer {
     private LimboConfig config;
     private Connections connections;
     private DimensionRegistry dimensionRegistry;
+    private ScheduledFuture<?> keepAliveTask;
+
+    private EventLoopGroup bossGroup;
+    private EventLoopGroup workerGroup;
 
     public LimboConfig getConfig(){
         return config;
@@ -39,6 +49,8 @@ public final class LimboServer {
     public void start() throws Exception {
         Logger.info("Starting server...");
 
+        ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.DISABLED);
+
         NodeSerializers.register(SocketAddress.class, new SocketAddressSerializer());
         NodeSerializers.register(InfoForwarding.class, new InfoForwarding.Serializer());
         NodeSerializers.register(PingData.class, new PingData.Serializer());
@@ -56,21 +68,55 @@ public final class LimboServer {
         ClientConnection.preInitPackets(this);
 
-        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
-        executor.scheduleAtFixedRate(this::broadcastKeepAlive, 0L, 5L, TimeUnit.SECONDS);
+        startBootstrap();
 
-        new ServerBootstrap()
-                .group(new NioEventLoopGroup(), new NioEventLoopGroup())
-                .channel(NioServerSocketChannel.class)
-                .childHandler(new ClientChannelInitializer(this))
-                .localAddress(config.getAddress())
-                .bind();
+        keepAliveTask = workerGroup.scheduleAtFixedRate(this::broadcastKeepAlive, 0L, 5L, TimeUnit.SECONDS);
+
+        Runtime.getRuntime().addShutdownHook(new Thread(this::stop, "NanoLimbo shutdown thread"));
 
         Logger.info("Server started on %s", config.getAddress());
     }
 
+    private void startBootstrap(){
+        Class<? extends ServerChannel> channelClass;
+
+        if (Epoll.isAvailable()){
+            bossGroup = new EpollEventLoopGroup(1);
+            workerGroup = new EpollEventLoopGroup(4);
+            channelClass = EpollServerSocketChannel.class;
+            Logger.debug("Using Epoll transport type");
+        } else {
+            bossGroup = new NioEventLoopGroup(1);
+            workerGroup = new NioEventLoopGroup(4);
+            channelClass = NioServerSocketChannel.class;
+            Logger.debug("Using Java NIO transport type");
+        }
+
+        new ServerBootstrap()
+                .group(bossGroup, workerGroup)
+                .channel(channelClass)
+                .childHandler(new ClientChannelInitializer(this))
+                .childOption(ChannelOption.TCP_NODELAY, true)
+                .localAddress(config.getAddress())
+                .bind();
+    }
+
     private void broadcastKeepAlive(){
         connections.getAllConnections().forEach(ClientConnection::sendKeepAlive);
     }
 
+    private void stop(){
+        if (keepAliveTask != null){
+            keepAliveTask.cancel(true);
+        }
+
+        if (bossGroup != null){
+            bossGroup.shutdownGracefully();
+        }
+
+        if (workerGroup != null){
+            workerGroup.shutdownGracefully();
+        }
+    }
+
 }
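
Note: the following is a minimal standalone sketch (not code from this commit) of the startup pattern the diff introduces, assuming Netty 4.1: pick the native epoll transport when available, fall back to NIO, enable TCP_NODELAY on child channels, schedule the keep-alive on the worker event loop instead of a separate ScheduledExecutorService, and shut everything down gracefully. The class name, port parameter, and empty channel initializer are placeholders.

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.ServerChannel;
import io.netty.channel.epoll.Epoll;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.epoll.EpollServerSocketChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;

import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

// Illustrative only: transport selection, TCP_NODELAY, event-loop keep-alive, graceful shutdown.
public final class TransportSelectionSketch {

    private EventLoopGroup bossGroup;
    private EventLoopGroup workerGroup;
    private ScheduledFuture<?> keepAliveTask;

    public void start(int port) {
        Class<? extends ServerChannel> channelClass;

        if (Epoll.isAvailable()) {
            // Native Linux transport: generally lower latency and less GC pressure than NIO
            bossGroup = new EpollEventLoopGroup(1);
            workerGroup = new EpollEventLoopGroup(4);
            channelClass = EpollServerSocketChannel.class;
        } else {
            // Portable fallback on any JVM
            bossGroup = new NioEventLoopGroup(1);
            workerGroup = new NioEventLoopGroup(4);
            channelClass = NioServerSocketChannel.class;
        }

        new ServerBootstrap()
                .group(bossGroup, workerGroup)
                .channel(channelClass)
                .childHandler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    protected void initChannel(SocketChannel ch) {
                        // real pipeline setup (codecs, handlers) goes here
                    }
                })
                // Disable Nagle's algorithm so small packets are flushed without delay
                .childOption(ChannelOption.TCP_NODELAY, true)
                .bind(port);

        // EventLoopGroup extends ScheduledExecutorService, so the periodic keep-alive
        // can run on the worker loop; no dedicated scheduler thread is needed.
        keepAliveTask = workerGroup.scheduleAtFixedRate(
                () -> { /* broadcast keep-alive to connected clients */ },
                0L, 5L, TimeUnit.SECONDS);
    }

    public void stop() {
        if (keepAliveTask != null) keepAliveTask.cancel(true);
        if (bossGroup != null) bossGroup.shutdownGracefully();
        if (workerGroup != null) workerGroup.shutdownGracefully();
    }
}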